diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index 9efb9c8b498aa..54be022ce236b 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -56,7 +56,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["8.16.3", "8.17.1", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index b1e5a7bf933c9..aded97712d7a5 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -287,8 +287,8 @@ steps: env: BWC_VERSION: 8.15.5 - - label: "{{matrix.image}} / 8.16.3 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.3 + - label: "{{matrix.image}} / 8.16.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.16.4 timeout_in_minutes: 300 matrix: setup: @@ -301,10 +301,10 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.16.3 + BWC_VERSION: 8.16.4 - - label: "{{matrix.image}} / 8.17.1 / packaging-tests-upgrade" - command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.1 + - label: "{{matrix.image}} / 8.17.2 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.17.2 timeout_in_minutes: 300 matrix: setup: @@ -317,7 +317,7 @@ steps: machineType: custom-16-32768 buildDirectory: /dev/shm/bk env: - BWC_VERSION: 8.17.1 + BWC_VERSION: 8.17.2 - label: "{{matrix.image}} / 8.18.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.18.0 diff --git a/.buildkite/pipelines/periodic.yml 
b/.buildkite/pipelines/periodic.yml index 4c593bae62d7a..64c4d59fd7fbe 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -306,8 +306,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.16.3 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.3#bwcTest + - label: 8.16.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.16.4#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -316,7 +316,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.16.3 + BWC_VERSION: 8.16.4 retry: automatic: - exit_status: "-1" @@ -325,8 +325,8 @@ steps: - signal_reason: agent_stop limit: 3 - - label: 8.17.1 / bwc - command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.1#bwcTest + - label: 8.17.2 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.17.2#bwcTest timeout_in_minutes: 300 agents: provider: gcp @@ -335,7 +335,7 @@ steps: buildDirectory: /dev/shm/bk preemptible: true env: - BWC_VERSION: 8.17.1 + BWC_VERSION: 8.17.2 retry: automatic: - exit_status: "-1" @@ -448,7 +448,7 @@ steps: setup: ES_RUNTIME_JAVA: - openjdk21 - BWC_VERSION: ["8.16.3", "8.17.1", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 @@ -490,7 +490,7 @@ steps: ES_RUNTIME_JAVA: - openjdk21 - openjdk23 - BWC_VERSION: ["8.16.3", "8.17.1", "8.18.0", "9.0.0"] + BWC_VERSION: ["8.16.4", "8.17.2", "8.18.0", "9.0.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/scripts/dra-workflow.sh b/.buildkite/scripts/dra-workflow.sh index ff536bbf554ef..d6bc3063fab75 100755 --- a/.buildkite/scripts/dra-workflow.sh +++ b/.buildkite/scripts/dra-workflow.sh @@ -22,6 +22,7 @@ if [[ "$BRANCH" == "main" ]]; then fi ES_VERSION=$(grep elasticsearch build-tools-internal/version.properties | sed "s/elasticsearch *= *//g") +BASE_VERSION="$ES_VERSION" echo 
"ES_VERSION=$ES_VERSION" VERSION_SUFFIX="" @@ -29,8 +30,8 @@ if [[ "$WORKFLOW" == "snapshot" ]]; then VERSION_SUFFIX="-SNAPSHOT" fi -if [[ -n "${VERSION_QUALIFER:-}" ]]; then - ES_VERSION = "${ES_VERSION}-${VERSION_QUALIFER}" +if [[ -n "${VERSION_QUALIFIER:-}" ]]; then + ES_VERSION="${ES_VERSION}-${VERSION_QUALIFIER}" echo "Version qualifier specified. ES_VERSION=${ES_VERSION}." fi @@ -53,8 +54,8 @@ if [[ "$WORKFLOW" == "staging" ]]; then BUILD_SNAPSHOT_ARG="-Dbuild.snapshot=false" fi -if [[ -n "${VERSION_QUALIFER:-}" ]]; then - VERSION_QUALIFIER_ARG="-Dbuild.version_qualifier=$VERSION_QUALIFER" +if [[ -n "${VERSION_QUALIFIER:-}" ]]; then + VERSION_QUALIFIER_ARG="-Dbuild.version_qualifier=$VERSION_QUALIFIER" fi echo --- Building release artifacts @@ -72,10 +73,10 @@ echo --- Building release artifacts :distribution:generateDependenciesReport PATH="$PATH:${JAVA_HOME}/bin" # Required by the following script -if [[ -z "${VERSION_QUALIFER:-}" ]]; then +if [[ -z "${VERSION_QUALIFIER:-}" ]]; then x-pack/plugin/sql/connectors/tableau/package.sh asm qualifier="$VERSION_SUFFIX" else -x-pack/plugin/sql/connectors/tableau/package.sh asm qualifier="$VERSION_QUALIFER" +x-pack/plugin/sql/connectors/tableau/package.sh asm qualifier="-$VERSION_QUALIFIER" fi # we regenerate this file as part of the release manager invocation @@ -103,8 +104,8 @@ docker run --rm \ --branch "$RM_BRANCH" \ --commit "$BUILDKITE_COMMIT" \ --workflow "$WORKFLOW" \ - --qualifier "${VERSION_QUALIFER:-}" \ - --version "$ES_VERSION" \ + --qualifier "${VERSION_QUALIFIER:-}" \ + --version "$BASE_VERSION" \ --artifact-set main \ --dependency "beats:https://artifacts-${WORKFLOW}.elastic.co/beats/${BEATS_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" \ --dependency "ml-cpp:https://artifacts-${WORKFLOW}.elastic.co/ml-cpp/${ML_CPP_BUILD_ID}/manifest-${ES_VERSION}${VERSION_SUFFIX}.json" diff --git a/.ci/bwcVersions b/.ci/bwcVersions index cf12ee8c15419..9f4b86ffc7ada 100644 --- a/.ci/bwcVersions +++ 
b/.ci/bwcVersions @@ -15,7 +15,7 @@ BWC_VERSION: - "8.13.4" - "8.14.3" - "8.15.5" - - "8.16.3" - - "8.17.1" + - "8.16.4" + - "8.17.2" - "8.18.0" - "9.0.0" diff --git a/.ci/snapshotBwcVersions b/.ci/snapshotBwcVersions index 68c6ad5601546..38104e03edb5f 100644 --- a/.ci/snapshotBwcVersions +++ b/.ci/snapshotBwcVersions @@ -1,5 +1,5 @@ BWC_VERSION: - - "8.16.3" - - "8.17.1" + - "8.16.4" + - "8.17.2" - "8.18.0" - "9.0.0" diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaModulePathPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaModulePathPlugin.java index 90a053b2ec3d6..ec4663bf9fda2 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaModulePathPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaModulePathPlugin.java @@ -53,7 +53,7 @@ public void apply(Project project) { } // List of root tasks, by name, whose compileJava task should not use the module path. These are test related sources. 
- static final Set EXCLUDES = Set.of(":test:framework", ":x-pack:plugin:eql:qa:common"); + static final Set EXCLUDES = Set.of(":test:framework", ":x-pack:plugin:eql:qa:common", ":x-pack:plugin:esql:compute:test"); void configureCompileModulePath(Project project) { // first disable Gradle's builtin module path inference diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index d4d40e697470e..06d76f125efdb 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -171,8 +171,8 @@ private static Stream maybeAttachEntitlementAgent(boolean useEntitlement throw new IllegalStateException("Failed to list entitlement jars in: " + dir, e); } // We instrument classes in these modules to call the bridge. Because the bridge gets patched - // into java.base, we must export the bridge from java.base to these modules. 
- String modulesContainingEntitlementInstrumentation = "java.logging"; + // into java.base, we must export the bridge from java.base to these modules, as a comma-separated list + String modulesContainingEntitlementInstrumentation = "java.logging,java.net.http,java.naming"; return Stream.of( "-Des.entitlements.enabled=true", "-XX:+EnableDynamicAgentLoading", diff --git a/docs/changelog/117469.yaml b/docs/changelog/117469.yaml deleted file mode 100644 index cfb14f78cb578..0000000000000 --- a/docs/changelog/117469.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 117469 -summary: Handle exceptions in query phase can match -area: Search -type: bug -issues: - - 104994 diff --git a/docs/changelog/117840.yaml b/docs/changelog/117840.yaml deleted file mode 100644 index e1f469643af42..0000000000000 --- a/docs/changelog/117840.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117840 -summary: Fix timeout ingesting an empty string into a `semantic_text` field -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/117851.yaml b/docs/changelog/117851.yaml deleted file mode 100644 index 21888cd6fb80f..0000000000000 --- a/docs/changelog/117851.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 117851 -summary: Addition of `tier_preference`, `creation_date` and `version` fields in Elasticsearch monitoring template -area: Monitoring -type: enhancement -issues: [] diff --git a/docs/changelog/118454.yaml b/docs/changelog/118454.yaml deleted file mode 100644 index 9a19ede64d705..0000000000000 --- a/docs/changelog/118454.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118454 -summary: Fix RLIKE folding with (unsupported) case insensitive pattern -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/118516.yaml b/docs/changelog/118516.yaml deleted file mode 100644 index 8a618a6d6cfd7..0000000000000 --- a/docs/changelog/118516.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118435 -summary: Fix moving function linear weighted avg -area: Aggregations -type: bug -issues: - - 113751 diff --git 
a/docs/changelog/118603.yaml b/docs/changelog/118603.yaml deleted file mode 100644 index d61619adfa5f6..0000000000000 --- a/docs/changelog/118603.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118603 -summary: Allow DATE_PARSE to read the timezones -area: ES|QL -type: bug -issues: - - 117680 diff --git a/docs/changelog/118619.yaml b/docs/changelog/118619.yaml new file mode 100644 index 0000000000000..824d1511606de --- /dev/null +++ b/docs/changelog/118619.yaml @@ -0,0 +1,5 @@ +pr: 118619 +summary: Optional named arguments for function in map +area: EQL +type: enhancement +issues: [] diff --git a/docs/changelog/118757.yaml b/docs/changelog/118757.yaml deleted file mode 100644 index 956e220f21aeb..0000000000000 --- a/docs/changelog/118757.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118757 -summary: Improve handling of nested fields in index reader wrappers -area: Authorization -type: enhancement -issues: [] diff --git a/docs/changelog/118816.yaml b/docs/changelog/118816.yaml deleted file mode 100644 index f1c1eac90dbcf..0000000000000 --- a/docs/changelog/118816.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118816 -summary: Support flattened field with downsampling -area: Downsampling -type: bug -issues: - - 116319 diff --git a/docs/changelog/118837.yaml b/docs/changelog/118837.yaml deleted file mode 100644 index 38cd32f3a3513..0000000000000 --- a/docs/changelog/118837.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118837 -summary: Add missing timeouts to rest-api-spec ILM APIs -area: "ILM+SLM" -type: bug -issues: [] diff --git a/docs/changelog/118844.yaml b/docs/changelog/118844.yaml deleted file mode 100644 index f9f92bcaeb8cb..0000000000000 --- a/docs/changelog/118844.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118844 -summary: Add missing timeouts to rest-api-spec ingest APIs -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/118919.yaml b/docs/changelog/118919.yaml deleted file mode 100644 index 832fd86fe08ba..0000000000000 --- a/docs/changelog/118919.yaml +++ /dev/null 
@@ -1,5 +0,0 @@ -pr: 118919 -summary: Remove unsupported timeout from rest-api-spec license API -area: License -type: bug -issues: [] diff --git a/docs/changelog/118921.yaml b/docs/changelog/118921.yaml deleted file mode 100644 index bd341616d8a14..0000000000000 --- a/docs/changelog/118921.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118921 -summary: Add missing timeouts to rest-api-spec shutdown APIs -area: Infra/Node Lifecycle -type: bug -issues: [] diff --git a/docs/changelog/118954.yaml b/docs/changelog/118954.yaml deleted file mode 100644 index ab2f2cda5c11e..0000000000000 --- a/docs/changelog/118954.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118954 -summary: Add missing parameter to `xpack.info` rest-api-spec -area: Infra/REST API -type: bug -issues: [] diff --git a/docs/changelog/118958.yaml b/docs/changelog/118958.yaml deleted file mode 100644 index fb0fd6388ab61..0000000000000 --- a/docs/changelog/118958.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 118958 -summary: Add missing timeouts to rest-api-spec SLM APIs -area: ILM+SLM -type: bug -issues: [] diff --git a/docs/changelog/118999.yaml b/docs/changelog/118999.yaml deleted file mode 100644 index 0188cebbd7685..0000000000000 --- a/docs/changelog/118999.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 118999 -summary: Fix loss of context in the inference API for streaming APIs -area: Machine Learning -type: bug -issues: - - 119000 diff --git a/docs/changelog/119131.yaml b/docs/changelog/119131.yaml deleted file mode 100644 index 2628b6184f90d..0000000000000 --- a/docs/changelog/119131.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119131 -summary: Expose BwC enrich cache setting in plugin -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/119134.yaml b/docs/changelog/119134.yaml deleted file mode 100644 index c4aefac91c701..0000000000000 --- a/docs/changelog/119134.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 119134 -summary: Handle `index.mapping.ignore_malformed` in downsampling -area: Downsampling -type: bug -issues: - 
- 119075 diff --git a/docs/changelog/119233.yaml b/docs/changelog/119233.yaml deleted file mode 100644 index ef89c011ce4f6..0000000000000 --- a/docs/changelog/119233.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119233 -summary: Fixing `GetDatabaseConfigurationAction` response serialization -area: Ingest Node -type: bug -issues: [] diff --git a/docs/changelog/119449.yaml b/docs/changelog/119449.yaml deleted file mode 100644 index f02bfa6d16d60..0000000000000 --- a/docs/changelog/119449.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119449 -summary: Add missing traces ilm policy for OTel traces data streams -area: Data streams -type: bug -issues: [] diff --git a/docs/changelog/119495.yaml b/docs/changelog/119495.yaml deleted file mode 100644 index b3e8f7e79d984..0000000000000 --- a/docs/changelog/119495.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119495 -summary: Add mapping for `event_name` for OTel logs -area: Data streams -type: enhancement -issues: [] diff --git a/docs/changelog/119516.yaml b/docs/changelog/119516.yaml deleted file mode 100644 index 06dd5168a0823..0000000000000 --- a/docs/changelog/119516.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119516 -summary: "Fix: do not let `_resolve/cluster` hang if remote is unresponsive" -area: Search -type: bug -issues: [] diff --git a/docs/changelog/119536.yaml b/docs/changelog/119536.yaml new file mode 100644 index 0000000000000..e4b0fca2bd8db --- /dev/null +++ b/docs/changelog/119536.yaml @@ -0,0 +1,5 @@ +pr: 119536 +summary: Fix ROUND() with unsigned longs throwing in some edge cases +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/119580.yaml b/docs/changelog/119580.yaml new file mode 100644 index 0000000000000..ba437d2691c48 --- /dev/null +++ b/docs/changelog/119580.yaml @@ -0,0 +1,5 @@ +pr: 119580 +summary: Do not serialize `EsIndex` in plan +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/119637.yaml b/docs/changelog/119637.yaml deleted file mode 100644 index c2fd6dc51f068..0000000000000 --- 
a/docs/changelog/119637.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119637 -summary: Fix spike detection for short spikes at the tail of the data -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/119743.yaml b/docs/changelog/119743.yaml new file mode 100644 index 0000000000000..b6f53c0dd1aed --- /dev/null +++ b/docs/changelog/119743.yaml @@ -0,0 +1,5 @@ +pr: 119743 +summary: POC mark read-only +area: Engine +type: enhancement +issues: [] diff --git a/docs/changelog/119750.yaml b/docs/changelog/119750.yaml deleted file mode 100644 index 2ec5c298d0eb1..0000000000000 --- a/docs/changelog/119750.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 119750 -summary: "ESQL: `connect_transport_exception` should be thrown instead of `verification_exception`\ - \ when ENRICH-ing if remote is disconnected" -area: Search -type: bug -issues: [] diff --git a/docs/changelog/119793.yaml b/docs/changelog/119793.yaml deleted file mode 100644 index 80330c25c2f30..0000000000000 --- a/docs/changelog/119793.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 119793 -summary: Resolve/cluster should mark remotes as not connected when a security exception - is thrown -area: CCS -type: bug -issues: [] diff --git a/docs/changelog/119797.yaml b/docs/changelog/119797.yaml deleted file mode 100644 index 992c2078e0caa..0000000000000 --- a/docs/changelog/119797.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119797 -summary: "[Inference API] Fix bug checking for e5 or reranker default IDs" -area: Machine Learning -type: bug -issues: [] diff --git a/docs/changelog/119897.yaml b/docs/changelog/119897.yaml deleted file mode 100644 index 87c5890f9fde1..0000000000000 --- a/docs/changelog/119897.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 119897 -summary: Fix ESQL async get while task is being cancelled -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/120192.yaml b/docs/changelog/120192.yaml new file mode 100644 index 0000000000000..6a40fa628d373 --- /dev/null +++ b/docs/changelog/120192.yaml @@ -0,0 
+1,5 @@ +pr: 120192 +summary: Extend `TranslationAware` to all pushable expressions +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120244.yaml b/docs/changelog/120244.yaml new file mode 100644 index 0000000000000..86c21a8a30332 --- /dev/null +++ b/docs/changelog/120244.yaml @@ -0,0 +1,5 @@ +pr: 120244 +summary: Ignore closed indices for reindex +area: Data streams +type: enhancement +issues: [] diff --git a/docs/changelog/120250.yaml b/docs/changelog/120250.yaml new file mode 100644 index 0000000000000..5df5bfa7d04ed --- /dev/null +++ b/docs/changelog/120250.yaml @@ -0,0 +1,6 @@ +pr: 120250 +summary: "Retry internally when CAS upload is throttled [GCS]" +area: Snapshot/Restore +type: enhancement +issues: + - 116546 diff --git a/docs/changelog/120271.yaml b/docs/changelog/120271.yaml new file mode 100644 index 0000000000000..cb252b60cd197 --- /dev/null +++ b/docs/changelog/120271.yaml @@ -0,0 +1,5 @@ +pr: 120271 +summary: Optimize indexing points with index and doc values set to true +area: Geo +type: enhancement +issues: [] diff --git a/docs/changelog/120354.yaml b/docs/changelog/120354.yaml new file mode 100644 index 0000000000000..46bdf57327330 --- /dev/null +++ b/docs/changelog/120354.yaml @@ -0,0 +1,5 @@ +pr: 120354 +summary: Move scoring in ES|QL out of snapshot +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/120370.yaml b/docs/changelog/120370.yaml new file mode 100644 index 0000000000000..0e1a900488b60 --- /dev/null +++ b/docs/changelog/120370.yaml @@ -0,0 +1,6 @@ +pr: 120370 +summary: "Merge field mappers when updating mappings with [subobjects:false]" +area: Mapping +type: bug +issues: + - 120216 diff --git a/docs/changelog/120458.yaml b/docs/changelog/120458.yaml new file mode 100644 index 0000000000000..1ba286a1bdbe9 --- /dev/null +++ b/docs/changelog/120458.yaml @@ -0,0 +1,5 @@ +pr: 120458 +summary: Do not recommend increasing `max_shards_per_node` +area: Health +type: bug +issues: [] diff --git 
a/docs/changelog/120538.yaml b/docs/changelog/120538.yaml new file mode 100644 index 0000000000000..dfd7e33f4be29 --- /dev/null +++ b/docs/changelog/120538.yaml @@ -0,0 +1,5 @@ +pr: 120538 +summary: "Revert unwanted ES|QL lexer changes from PR #120354" +area: ES|QL +type: bug +issues: [] diff --git a/docs/reference/esql/functions/kibana/definition/like.json b/docs/reference/esql/functions/kibana/definition/like.json index ea6258f822c8c..2fcb29622efbd 100644 --- a/docs/reference/esql/functions/kibana/definition/like.json +++ b/docs/reference/esql/functions/kibana/definition/like.json @@ -32,7 +32,7 @@ }, { "name" : "pattern", - "type" : "text", + "type" : "keyword", "optional" : false, "description" : "Pattern." } diff --git a/docs/reference/esql/functions/kibana/definition/mv_append.json b/docs/reference/esql/functions/kibana/definition/mv_append.json index 81c1b777be498..043625d9ea1e7 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_append.json +++ b/docs/reference/esql/functions/kibana/definition/mv_append.json @@ -76,6 +76,24 @@ "variadic" : false, "returnType" : "date" }, + { + "params" : [ + { + "name" : "field1", + "type" : "date_nanos", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "date_nanos", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "date_nanos" + }, { "params" : [ { @@ -184,6 +202,24 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field1", + "type" : "keyword", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "text", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { @@ -202,6 +238,24 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "field1", + "type" : "text", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "keyword", + "optional" : false, + 
"description" : "" + } + ], + "variadic" : false, + "returnType" : "keyword" + }, { "params" : [ { @@ -220,6 +274,24 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field1", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + }, + { + "name" : "field2", + "type" : "unsigned_long", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json index ce2c96dbc1757..2fb5b9c61727f 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_dedupe.json +++ b/docs/reference/esql/functions/kibana/definition/mv_dedupe.json @@ -161,6 +161,18 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "Multivalue expression." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/mv_slice.json b/docs/reference/esql/functions/kibana/definition/mv_slice.json index df4d48145fac6..5ad8f588cdc2b 100644 --- a/docs/reference/esql/functions/kibana/definition/mv_slice.json +++ b/docs/reference/esql/functions/kibana/definition/mv_slice.json @@ -316,6 +316,30 @@ "variadic" : false, "returnType" : "keyword" }, + { + "params" : [ + { + "name" : "field", + "type" : "unsigned_long", + "optional" : false, + "description" : "Multivalue expression. If `null`, the function returns `null`." + }, + { + "name" : "start", + "type" : "integer", + "optional" : false, + "description" : "Start position. If `null`, the function returns `null`. The start argument can be negative. An index of -1 is used to specify the last value in the list." 
+ }, + { + "name" : "end", + "type" : "integer", + "optional" : true, + "description" : "End position(included). Optional; if omitted, the position at `start` is returned. The end argument can be negative. An index of -1 is used to specify the last value in the list." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, { "params" : [ { diff --git a/docs/reference/esql/functions/kibana/definition/rlike.json b/docs/reference/esql/functions/kibana/definition/rlike.json index 994748e74d4f8..47cbd7800c821 100644 --- a/docs/reference/esql/functions/kibana/definition/rlike.json +++ b/docs/reference/esql/functions/kibana/definition/rlike.json @@ -32,7 +32,7 @@ }, { "name" : "pattern", - "type" : "text", + "type" : "keyword", "optional" : false, "description" : "A regular expression." } diff --git a/docs/reference/esql/functions/kibana/definition/round.json b/docs/reference/esql/functions/kibana/definition/round.json index 4f4ddd36daa05..4ef20aa162b42 100644 --- a/docs/reference/esql/functions/kibana/definition/round.json +++ b/docs/reference/esql/functions/kibana/definition/round.json @@ -34,6 +34,24 @@ "variadic" : false, "returnType" : "double" }, + { + "params" : [ + { + "name" : "number", + "type" : "double", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "long", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "double" + }, { "params" : [ { @@ -64,6 +82,24 @@ "variadic" : false, "returnType" : "integer" }, + { + "params" : [ + { + "name" : "number", + "type" : "integer", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "long", + "optional" : true, + "description" : "The number of decimal places to round to. 
Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "integer" + }, { "params" : [ { @@ -94,6 +130,24 @@ "variadic" : false, "returnType" : "long" }, + { + "params" : [ + { + "name" : "number", + "type" : "long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "long", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "long" + }, { "params" : [ { @@ -105,6 +159,42 @@ ], "variadic" : false, "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "integer", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." + } + ], + "variadic" : false, + "returnType" : "unsigned_long" + }, + { + "params" : [ + { + "name" : "number", + "type" : "unsigned_long", + "optional" : false, + "description" : "The numeric value to round. If `null`, the function returns `null`." + }, + { + "name" : "decimals", + "type" : "long", + "optional" : true, + "description" : "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." 
+ } + ], + "variadic" : false, + "returnType" : "unsigned_long" } ], "examples" : [ diff --git a/docs/reference/esql/functions/types/like.asciidoc b/docs/reference/esql/functions/types/like.asciidoc index 46532f2af3bf3..fffa6dc0b8371 100644 --- a/docs/reference/esql/functions/types/like.asciidoc +++ b/docs/reference/esql/functions/types/like.asciidoc @@ -6,5 +6,5 @@ |=== str | pattern | result keyword | keyword | boolean -text | text | boolean +text | keyword | boolean |=== diff --git a/docs/reference/esql/functions/types/mv_append.asciidoc b/docs/reference/esql/functions/types/mv_append.asciidoc index 05f9ff6b19f9e..c5e87f78d4051 100644 --- a/docs/reference/esql/functions/types/mv_append.asciidoc +++ b/docs/reference/esql/functions/types/mv_append.asciidoc @@ -9,13 +9,17 @@ boolean | boolean | boolean cartesian_point | cartesian_point | cartesian_point cartesian_shape | cartesian_shape | cartesian_shape date | date | date +date_nanos | date_nanos | date_nanos double | double | double geo_point | geo_point | geo_point geo_shape | geo_shape | geo_shape integer | integer | integer ip | ip | ip keyword | keyword | keyword +keyword | text | keyword long | long | long +text | keyword | keyword text | text | keyword +unsigned_long | unsigned_long | unsigned_long version | version | version |=== diff --git a/docs/reference/esql/functions/types/mv_dedupe.asciidoc b/docs/reference/esql/functions/types/mv_dedupe.asciidoc index 1524ec86cd5ec..e68af2f992b43 100644 --- a/docs/reference/esql/functions/types/mv_dedupe.asciidoc +++ b/docs/reference/esql/functions/types/mv_dedupe.asciidoc @@ -18,5 +18,6 @@ ip | ip keyword | keyword long | long text | keyword +unsigned_long | unsigned_long version | version |=== diff --git a/docs/reference/esql/functions/types/mv_slice.asciidoc b/docs/reference/esql/functions/types/mv_slice.asciidoc index 75f45e333ee0c..ed65d227c8d92 100644 --- a/docs/reference/esql/functions/types/mv_slice.asciidoc +++ 
b/docs/reference/esql/functions/types/mv_slice.asciidoc @@ -18,5 +18,6 @@ ip | integer | integer | ip keyword | integer | integer | keyword long | integer | integer | long text | integer | integer | keyword +unsigned_long | integer | integer | unsigned_long version | integer | integer | version |=== diff --git a/docs/reference/esql/functions/types/rlike.asciidoc b/docs/reference/esql/functions/types/rlike.asciidoc index 46532f2af3bf3..fffa6dc0b8371 100644 --- a/docs/reference/esql/functions/types/rlike.asciidoc +++ b/docs/reference/esql/functions/types/rlike.asciidoc @@ -6,5 +6,5 @@ |=== str | pattern | result keyword | keyword | boolean -text | text | boolean +text | keyword | boolean |=== diff --git a/docs/reference/esql/functions/types/round.asciidoc b/docs/reference/esql/functions/types/round.asciidoc index 2c0fe768741f6..9b102edf6ed94 100644 --- a/docs/reference/esql/functions/types/round.asciidoc +++ b/docs/reference/esql/functions/types/round.asciidoc @@ -6,10 +6,15 @@ |=== number | decimals | result double | integer | double +double | long | double double | | double integer | integer | integer +integer | long | integer integer | | integer long | integer | long +long | long | long long | | long +unsigned_long | integer | unsigned_long +unsigned_long | long | unsigned_long unsigned_long | | unsigned_long |=== diff --git a/docs/reference/esql/metadata-fields.asciidoc b/docs/reference/esql/metadata-fields.asciidoc index 66046b1b0091f..9617c06cb27c8 100644 --- a/docs/reference/esql/metadata-fields.asciidoc +++ b/docs/reference/esql/metadata-fields.asciidoc @@ -20,6 +20,8 @@ supported ones are: * <>: the ignored source document fields. The field is of the type <>. + * `_score`: when enabled, the final score assigned to each row matching an ES|QL query. Scoring will be updated when using <>. 
+ To enable the access to these fields, the <> source command needs to be provided with a dedicated directive: diff --git a/docs/reference/inference/inference-apis.asciidoc b/docs/reference/inference/inference-apis.asciidoc index 4f27409973ca2..6c97f388788f7 100644 --- a/docs/reference/inference/inference-apis.asciidoc +++ b/docs/reference/inference/inference-apis.asciidoc @@ -16,10 +16,8 @@ models or if you want to use non-NLP models, use the For the most up-to-date API details, refer to {api-es}/group/endpoint-inference[{infer-cap} APIs]. -- -The {infer} APIs enable you to create {infer} endpoints and use {ml} models of -different providers - such as Amazon Bedrock, Anthropic, Azure AI Studio, -Cohere, Google AI, Mistral, OpenAI, or HuggingFace - as a service. Use -the following APIs to manage {infer} models and perform {infer}: +The {infer} APIs enable you to create {infer} endpoints and integrate with {ml} models of different services - such as Amazon Bedrock, Anthropic, Azure AI Studio, Cohere, Google AI, Mistral, OpenAI, or HuggingFace. +Use the following APIs to manage {infer} models and perform {infer}: * <> * <> @@ -37,10 +35,8 @@ An {infer} endpoint enables you to use the corresponding {ml} model without manual deployment and apply it to your data at ingestion time through <>. -Choose a model from your provider or use ELSER – a retrieval model trained by -Elastic –, then create an {infer} endpoint by the <>. -Now use <> to perform -<> on your data. +Choose a model from your service or use ELSER – a retrieval model trained by Elastic –, then create an {infer} endpoint by the <>. +Now use <> to perform <> on your data. 
[discrete] [[adaptive-allocations]] diff --git a/docs/reference/inference/put-inference.asciidoc b/docs/reference/inference/put-inference.asciidoc index da07d1d3e7d84..4e149667d6298 100644 --- a/docs/reference/inference/put-inference.asciidoc +++ b/docs/reference/inference/put-inference.asciidoc @@ -42,7 +42,7 @@ include::inference-shared.asciidoc[tag=inference-id] include::inference-shared.asciidoc[tag=task-type] + -- -Refer to the service list in the <> for the available task types. +Refer to the integration list in the <> for the available task types. -- @@ -54,15 +54,15 @@ The create {infer} API enables you to create an {infer} endpoint and configure a [IMPORTANT] ==== -* When creating an inference endpoint, the associated machine learning model is automatically deployed if it is not already running. +* When creating an {infer} endpoint, the associated {ml} model is automatically deployed if it is not already running. * After creating the endpoint, wait for the model deployment to complete before using it. You can verify the deployment status by using the <> API. In the response, look for `"state": "fully_allocated"` and ensure the `"allocation_count"` matches the `"target_allocation_count"`. * Avoid creating multiple endpoints for the same model unless required, as each endpoint consumes significant resources. ==== -The following services are available through the {infer} API. -You can find the available task types next to the service name. -Click the links to review the configuration details of the services: +The following integrations are available through the {infer} API. +You can find the available task types next to the integration name. 
+Click the links to review the configuration details of the integrations: * <> (`completion`, `rerank`, `sparse_embedding`, `text_embedding`) * <> (`completion`, `text_embedding`) @@ -80,14 +80,14 @@ Click the links to review the configuration details of the services: * <> (`text_embedding`) * <> (`text_embedding`, `rerank`) -The {es} and ELSER services run on a {ml} node in your {es} cluster. The rest of -the services connect to external providers. +The {es} and ELSER services run on a {ml} node in your {es} cluster. +The rest of the integrations connect to external services. [discrete] [[adaptive-allocations-put-inference]] ==== Adaptive allocations -Adaptive allocations allow inference services to dynamically adjust the number of model allocations based on the current load. +Adaptive allocations allow inference endpoints to dynamically adjust the number of model allocations based on the current load. When adaptive allocations are enabled: diff --git a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc index 5a11190549ce6..eea0e094dce5a 100644 --- a/docs/reference/inference/service-alibabacloud-ai-search.asciidoc +++ b/docs/reference/inference/service-alibabacloud-ai-search.asciidoc @@ -1,5 +1,5 @@ [[infer-service-alibabacloud-ai-search]] -=== AlibabaCloud AI Search {infer} service +=== AlibabaCloud AI Search {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-amazon-bedrock.asciidoc b/docs/reference/inference/service-amazon-bedrock.asciidoc index ed25ce0d515b5..d4ae3895b7c76 100644 --- a/docs/reference/inference/service-amazon-bedrock.asciidoc +++ b/docs/reference/inference/service-amazon-bedrock.asciidoc @@ -1,5 +1,5 @@ [[infer-service-amazon-bedrock]] -=== Amazon Bedrock {infer} service +=== Amazon Bedrock {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-anthropic.asciidoc 
b/docs/reference/inference/service-anthropic.asciidoc index 4ce76dc1d57bd..08d8ca43daea8 100644 --- a/docs/reference/inference/service-anthropic.asciidoc +++ b/docs/reference/inference/service-anthropic.asciidoc @@ -1,5 +1,5 @@ [[infer-service-anthropic]] -=== Anthropic {infer} service +=== Anthropic {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-azure-ai-studio.asciidoc b/docs/reference/inference/service-azure-ai-studio.asciidoc index 7ada8df1ecdaa..b179a87de1594 100644 --- a/docs/reference/inference/service-azure-ai-studio.asciidoc +++ b/docs/reference/inference/service-azure-ai-studio.asciidoc @@ -1,5 +1,5 @@ [[infer-service-azure-ai-studio]] -=== Azure AI studio {infer} service +=== Azure AI studio {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-azure-openai.asciidoc b/docs/reference/inference/service-azure-openai.asciidoc index 170c0939166f7..eded44b7ab0b0 100644 --- a/docs/reference/inference/service-azure-openai.asciidoc +++ b/docs/reference/inference/service-azure-openai.asciidoc @@ -1,5 +1,5 @@ [[infer-service-azure-openai]] -=== Azure OpenAI {infer} service +=== Azure OpenAI {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-cohere.asciidoc b/docs/reference/inference/service-cohere.asciidoc index 70e311c810cdd..e95f0810fd29d 100644 --- a/docs/reference/inference/service-cohere.asciidoc +++ b/docs/reference/inference/service-cohere.asciidoc @@ -1,5 +1,5 @@ [[infer-service-cohere]] -=== Cohere {infer} service +=== Cohere {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-elasticsearch.asciidoc b/docs/reference/inference/service-elasticsearch.asciidoc index 8870fbed357a6..745b14904dd6d 100644 --- a/docs/reference/inference/service-elasticsearch.asciidoc +++ b/docs/reference/inference/service-elasticsearch.asciidoc @@ -1,5 +1,5 @@ [[infer-service-elasticsearch]] -=== 
Elasticsearch {infer} service +=== Elasticsearch {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-elser.asciidoc b/docs/reference/inference/service-elser.asciidoc index 47aaa58814602..6a509ec850903 100644 --- a/docs/reference/inference/service-elser.asciidoc +++ b/docs/reference/inference/service-elser.asciidoc @@ -1,5 +1,5 @@ [[infer-service-elser]] -=== ELSER {infer} service +=== ELSER {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-google-ai-studio.asciidoc b/docs/reference/inference/service-google-ai-studio.asciidoc index 5b30292fb9beb..a6f7d914decfa 100644 --- a/docs/reference/inference/service-google-ai-studio.asciidoc +++ b/docs/reference/inference/service-google-ai-studio.asciidoc @@ -1,5 +1,5 @@ [[infer-service-google-ai-studio]] -=== Google AI Studio {infer} service +=== Google AI Studio {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-google-vertex-ai.asciidoc b/docs/reference/inference/service-google-vertex-ai.asciidoc index 28fa65b6e5fcc..f9499de7e5602 100644 --- a/docs/reference/inference/service-google-vertex-ai.asciidoc +++ b/docs/reference/inference/service-google-vertex-ai.asciidoc @@ -1,5 +1,5 @@ [[infer-service-google-vertex-ai]] -=== Google Vertex AI {infer} service +=== Google Vertex AI {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-hugging-face.asciidoc b/docs/reference/inference/service-hugging-face.asciidoc index 862914c141740..40fb2002975dd 100644 --- a/docs/reference/inference/service-hugging-face.asciidoc +++ b/docs/reference/inference/service-hugging-face.asciidoc @@ -1,5 +1,5 @@ [[infer-service-hugging-face]] -=== HuggingFace {infer} service +=== HuggingFace {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-jinaai.asciidoc b/docs/reference/inference/service-jinaai.asciidoc index 7c5aebe5bcf8e..1470c58315430 
100644 --- a/docs/reference/inference/service-jinaai.asciidoc +++ b/docs/reference/inference/service-jinaai.asciidoc @@ -1,5 +1,5 @@ [[infer-service-jinaai]] -=== JinaAI {infer} service +=== JinaAI {infer} integration Creates an {infer} endpoint to perform an {infer} task with the `jinaai` service. diff --git a/docs/reference/inference/service-mistral.asciidoc b/docs/reference/inference/service-mistral.asciidoc index 326e8458be767..20e1133e8a83c 100644 --- a/docs/reference/inference/service-mistral.asciidoc +++ b/docs/reference/inference/service-mistral.asciidoc @@ -1,5 +1,5 @@ [[infer-service-mistral]] -=== Mistral {infer} service +=== Mistral {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-openai.asciidoc b/docs/reference/inference/service-openai.asciidoc index 590f280b1c494..8d7c6c937333d 100644 --- a/docs/reference/inference/service-openai.asciidoc +++ b/docs/reference/inference/service-openai.asciidoc @@ -1,5 +1,5 @@ [[infer-service-openai]] -=== OpenAI {infer} service +=== OpenAI {infer} integration .New API reference [sidebar] diff --git a/docs/reference/inference/service-watsonx-ai.asciidoc b/docs/reference/inference/service-watsonx-ai.asciidoc index e7bba7b4e9a97..31d246a36d350 100644 --- a/docs/reference/inference/service-watsonx-ai.asciidoc +++ b/docs/reference/inference/service-watsonx-ai.asciidoc @@ -1,5 +1,5 @@ [[infer-service-watsonx-ai]] -=== Watsonx {infer} service +=== Watsonx {infer} integration .New API reference [sidebar] diff --git a/docs/reference/mapping/types/array.asciidoc b/docs/reference/mapping/types/array.asciidoc index 2ffb191caf48e..59115b2a76ce0 100644 --- a/docs/reference/mapping/types/array.asciidoc +++ b/docs/reference/mapping/types/array.asciidoc @@ -10,19 +10,16 @@ same data type. 
For instance: * an array of arrays: [ `1`, [ `2`, `3` ]] which is the equivalent of [ `1`, `2`, `3` ] * an array of objects: [ `{ "name": "Mary", "age": 12 }`, `{ "name": "John", "age": 10 }`] -.Arrays of objects +.Arrays with `object` field type vs `nested` type [NOTE] ==================================================== -Arrays of objects do not work as you would expect: you cannot query each -object independently of the other objects in the array. If you need to be -able to do this then you should use the <> data type instead -of the <> data type. +Arrays of objects in Elasticsearch do not behave as you would expect: queries may match fields across different objects in the array, leading to unexpected results. By default, arrays of objects are <> +during indexing. To ensure queries match values within the same object, use the <> data type instead of the <> data type. -This is explained in more detail in <>. +This behavior is explained in more detail in <>. ==================================================== - When adding a field dynamically, the first value in the array determines the field `type`. All subsequent values must be of the same data type or it must at least be possible to <> subsequent values to the same @@ -81,3 +78,10 @@ GET my-index-000001/_search <3> The second document contains no arrays, but can be indexed into the same fields. <4> The query looks for `elasticsearch` in the `tags` field, and matches both documents. +[TIP] +==== +You can modify arrays using the <>. +==== + + + diff --git a/docs/reference/mapping/types/dense-vector.asciidoc b/docs/reference/mapping/types/dense-vector.asciidoc index c16b979043a57..75fbaea59c6bd 100644 --- a/docs/reference/mapping/types/dense-vector.asciidoc +++ b/docs/reference/mapping/types/dense-vector.asciidoc @@ -121,11 +121,13 @@ The three following quantization strategies are supported: * `bbq` - experimental:[] Better binary quantization which reduces each dimension to a single bit precision. 
This reduces the memory footprint by 96% (or 32x) at a larger cost of accuracy. Generally, oversampling during query time and reranking can help mitigate the accuracy loss. -When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. +When using a quantized format, you may want to oversample and rescore the results to improve accuracy. See <> for more information. To use a quantized index, you can set your index type to `int8_hnsw`, `int4_hnsw`, or `bbq_hnsw`. When indexing `float` vectors, the current default index type is `int8_hnsw`. +Quantized vectors can use <> to improve accuracy on approximate kNN search results. + NOTE: Quantization will continue to keep the raw float vector values on disk for reranking, reindexing, and quantization improvements over the lifetime of the data. This means disk usage will increase by ~25% for `int8`, ~12.5% for `int4`, and ~3.1% for `bbq` due to the overhead of storing the quantized and raw vectors. diff --git a/docs/reference/query-dsl/knn-query.asciidoc b/docs/reference/query-dsl/knn-query.asciidoc index 29ccadfa70a03..445c62bec82bd 100644 --- a/docs/reference/query-dsl/knn-query.asciidoc +++ b/docs/reference/query-dsl/knn-query.asciidoc @@ -137,6 +137,9 @@ documents are then scored according to <> and the provided `boost` is applied. -- +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-rescore-vector] + + `boost`:: + -- diff --git a/docs/reference/query-dsl/wildcard-query.asciidoc b/docs/reference/query-dsl/wildcard-query.asciidoc index 947b8d12f2a78..c2b4213a66866 100644 --- a/docs/reference/query-dsl/wildcard-query.asciidoc +++ b/docs/reference/query-dsl/wildcard-query.asciidoc @@ -81,6 +81,9 @@ the iterations needed to find matching terms and slow search performance. [[wildcard-query-notes]] ==== Notes + +Wildcard queries using `*` can be resource-intensive, particularly with leading wildcards. 
To improve performance, minimize their use and consider alternatives like the <>. While this allows for more efficient searching, it may increase index size. For better performance and accuracy, combine wildcard queries with other query types like <> or <> to first narrow down results. + ===== Allow expensive queries Wildcard queries will not be executed if <> is set to false. diff --git a/docs/reference/quickstart/aggs-tutorial.asciidoc b/docs/reference/quickstart/aggs-tutorial.asciidoc index 0a8494c3eb75d..daf72e5dcfbcf 100644 --- a/docs/reference/quickstart/aggs-tutorial.asciidoc +++ b/docs/reference/quickstart/aggs-tutorial.asciidoc @@ -524,7 +524,7 @@ GET kibana_sample_data_ecommerce/_search ---- // TEST[skip:Using Kibana sample data] <1> Descriptive name for the time-series aggregation results. -<2> The `date_histogram` aggregration groups documents into time-based buckets, similar to terms aggregation but for dates. +<2> The `date_histogram` aggregation groups documents into time-based buckets, similar to terms aggregation but for dates. <3> Uses <> to handle months with different lengths. `"day"` ensures consistent daily grouping regardless of timezone. <4> Formats dates in response using <> (e.g. "yyyy-MM-dd"). Refer to <> for additional options. <5> When `min_doc_count` is 0, returns buckets for days with no orders, useful for continuous time series visualization. diff --git a/docs/reference/rest-api/common-parms.asciidoc b/docs/reference/rest-api/common-parms.asciidoc index 83c11c9256a67..4b30ac46a52cd 100644 --- a/docs/reference/rest-api/common-parms.asciidoc +++ b/docs/reference/rest-api/common-parms.asciidoc @@ -1356,3 +1356,27 @@ tag::rrf-filter[] Applies the specified <> to all of the specified sub-retrievers, according to each retriever's specifications. end::rrf-filter[] + +tag::knn-rescore-vector[] + +`rescore_vector`:: ++ +-- +(Optional, object) Functionality in preview:[]. Apply oversampling and rescoring to quantized vectors. 
+ +NOTE: Rescoring only makes sense for quantized vectors; when <> is not used, the original vectors are used for scoring. +Rescore option will be ignored for non-quantized `dense_vector` fields. + +`oversample`:: +(Required, float) ++ +Applies the specified oversample factor to `k` on the approximate kNN search. +The approximate kNN search will: + +* Retrieve `num_candidates` candidates per shard. +* From these candidates, the top `k * oversample` candidates per shard will be rescored using the original vectors. +* The top `k` rescored candidates will be returned. + +See <> for details. +-- +end::knn-rescore-vector[] diff --git a/docs/reference/search/retriever.asciidoc b/docs/reference/search/retriever.asciidoc index 7e98297b780e6..21892b4efe5a8 100644 --- a/docs/reference/search/retriever.asciidoc +++ b/docs/reference/search/retriever.asciidoc @@ -233,6 +233,8 @@ include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-filter] + include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-rescore-vector] + ===== Restrictions The parameters `query_vector` and `query_vector_builder` cannot be used together. @@ -576,7 +578,7 @@ This example demonstrates how to deploy the {ml-docs}/ml-nlp-rerank.html[Elastic Follow these steps: -. Create an inference endpoint for the `rerank` task using the <>. +. Create an inference endpoint for the `rerank` task using the <>. + [source,console] ---- @@ -584,7 +586,7 @@ PUT _inference/rerank/my-elastic-rerank { "service": "elasticsearch", "service_settings": { - "model_id": ".rerank-v1", + "model_id": ".rerank-v1", "num_threads": 1, "adaptive_allocations": { <1> "enabled": true, @@ -595,7 +597,7 @@ PUT _inference/rerank/my-elastic-rerank } ---- // TEST[skip:uses ML] -<1> {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[Adaptive allocations] will be enabled with the minimum of 1 and the maximum of 10 allocations. 
+<1> {ml-docs}/ml-nlp-auto-scale.html#nlp-model-adaptive-allocations[Adaptive allocations] will be enabled with the minimum of 1 and the maximum of 10 allocations. + . Define a `text_similarity_rerank` retriever: + diff --git a/docs/reference/search/search-your-data/knn-search.asciidoc b/docs/reference/search/search-your-data/knn-search.asciidoc index 59a903b95e4f8..4e106364fa941 100644 --- a/docs/reference/search/search-your-data/knn-search.asciidoc +++ b/docs/reference/search/search-your-data/knn-search.asciidoc @@ -781,7 +781,7 @@ What if you wanted to filter by some top-level document metadata? You can do thi NOTE: `filter` will always be over the top-level document metadata. This means you cannot filter based on `nested` - field metadata. +field metadata. [source,console] ---- @@ -1068,100 +1068,77 @@ NOTE: Approximate kNN search always uses the the global top `k` matches across shards. You cannot set the `search_type` explicitly when running kNN search. + [discrete] -[[exact-knn]] -=== Exact kNN +[[dense-vector-knn-search-rescoring]] +==== Oversampling and rescoring for quantized vectors -To run an exact kNN search, use a `script_score` query with a vector function. +When using <> for kNN search, you can optionally rescore results to balance performance and accuracy, by doing: -. Explicitly map one or more `dense_vector` fields. If you don't intend to use -the field for approximate kNN, set the `index` mapping option to `false`. This -can significantly improve indexing speed. -+ -[source,console] ----- -PUT product-index -{ - "mappings": { - "properties": { - "product-vector": { - "type": "dense_vector", - "dims": 5, - "index": false - }, - "price": { - "type": "long" - } - } - } -} ----- +* *Oversampling*: Retrieve more candidates per shard. +* *Rescoring*: Use the original vector values for re-calculating the score on the oversampled candidates. -. Index your data. 
-+ -[source,console] ----- -POST product-index/_bulk?refresh=true -{ "index": { "_id": "1" } } -{ "product-vector": [230.0, 300.33, -34.8988, 15.555, -200.0], "price": 1599 } -{ "index": { "_id": "2" } } -{ "product-vector": [-0.5, 100.0, -13.0, 14.8, -156.0], "price": 799 } -{ "index": { "_id": "3" } } -{ "product-vector": [0.5, 111.3, -13.0, 14.8, -156.0], "price": 1099 } -... ----- -//TEST[continued] -//TEST[s/\.\.\.//] +As the non-quantized, original vectors are used to calculate the final score on the top results, rescoring combines: + +* The performance and memory gains of approximate retrieval using quantized vectors for retrieving the top candidates. +* The accuracy of using the original vectors for rescoring the top candidates. + +All forms of quantization will result in some accuracy loss and as the quantization level increases the accuracy loss will also increase. +Generally, we have found that: + +* `int8` requires minimal if any rescoring +* `int4` requires some rescoring for higher accuracy and larger recall scenarios. Generally, oversampling by 1.5x-2x recovers most of the accuracy loss. +* `bbq` requires rescoring except on exceptionally large indices or models specifically designed for quantization. We have found that between 3x-5x oversampling is generally sufficient. But for fewer dimensions or vectors that do not quantize well, higher oversampling may be required. + +You can use the `rescore_vector` preview:[] option to automatically perform reranking. +When a rescore `oversample` parameter is specified, the approximate kNN search will: + +* Retrieve `num_candidates` candidates per shard. +* From these candidates, the top `k * oversample` candidates per shard will be rescored using the original vectors. +* The top `k` rescored candidates will be returned. + +Here is an example of using the `rescore_vector` option with the `oversample` parameter: -. Use the <> to run a `script_score` query containing -a <>. 
-+ -TIP: To limit the number of matched documents passed to the vector function, we -recommend you specify a filter query in the `script_score.query` parameter. If -needed, you can use a <> in this -parameter to match all documents. However, matching all documents can -significantly increase search latency. -+ [source,console] ---- -POST product-index/_search +POST image-index/_search { - "query": { - "script_score": { - "query" : { - "bool" : { - "filter" : { - "range" : { - "price" : { - "gte": 1000 - } - } - } - } - }, - "script": { - "source": "cosineSimilarity(params.queryVector, 'product-vector') + 1.0", - "params": { - "queryVector": [-0.5, 90.0, -10, 14.8, -156.0] - } - } + "knn": { + "field": "image-vector", + "query_vector": [-5, 9, -12], + "k": 10, + "num_candidates": 100, + "rescore_vector": { + "oversample": 2.0 } - } + }, + "fields": [ "title", "file-type" ] } ---- //TEST[continued] +// TEST[s/"k": 10/"k": 3/] +// TEST[s/"num_candidates": 100/"num_candidates": 3/] + +This example will: + +* Search using approximate kNN for the top 100 candidates. +* Rescore the top 20 candidates (`oversample * k`) per shard using the original, non-quantized vectors. +* Return the top 10 (`k`) rescored candidates. +* Merge the rescored candidates from all shards, and return the top 10 (`k`) results. [discrete] -[[dense-vector-knn-search-reranking]] -==== Oversampling and rescoring for quantized vectors +[[dense-vector-knn-search-rescoring-rescore-additional]] +===== Additional rescoring techniques -All forms of quantization will result in some accuracy loss and as the quantization level increases the accuracy loss will also increase. -Generally, we have found that: -- `int8` requires minimal if any rescoring -- `int4` requires some rescoring for higher accuracy and larger recall scenarios. Generally, oversampling by 1.5x-2x recovers most of the accuracy loss. 
-- `bbq` requires rescoring except on exceptionally large indices or models specifically designed for quantization. We have found that between 3x-5x oversampling is generally sufficient. But for fewer dimensions or vectors that do not quantize well, higher oversampling may be required. +The following sections provide additional ways of rescoring: + +[discrete] +[[dense-vector-knn-search-rescoring-rescore-section]] +====== Use the `rescore` section for top-level kNN search + +You can use this option when you don't want to rescore on each shard, but on the top results from all shards. -There are two main ways to oversample and rescore. The first is to utilize the <> in the `_search` request. +Use the <> in the `_search` request to rescore the top results from a kNN search. Here is an example using the top level `knn` search with oversampling and using `rescore` to rerank the results: @@ -1210,8 +1187,16 @@ gathering 20 nearest neighbors according to quantized scoring and rescoring with <5> The weight of the original query, here we simply throw away the original score <6> The weight of the rescore query, here we only use the rescore query -The second way is to score per shard with the <> and <>. Generally, this means that there will be more rescoring per shard, but this -can increase overall recall at the cost of compute. + +[discrete] +[[dense-vector-knn-search-rescoring-script-score]] +====== Use a `script_score` query to rescore per shard + +You can use this option when you want to rescore on each shard and want more fine-grained control on the rescoring +than the `rescore_vector` option provides. + +Use rescore per shard with the <> and <>. +Generally, this means that there will be more rescoring per shard, but this can increase overall recall at the cost of compute. [source,console] -------------------------------------------------- @@ -1243,3 +1228,87 @@ POST /my-index/_search <3> The number of candidates to use for the initial approximate `knn` search. 
This will search using the quantized vectors and return the top 20 candidates per shard to then be scored <4> The script to score the results. Script score will interact directly with the originally provided float32 vector. + + +[discrete] +[[exact-knn]] +=== Exact kNN + +To run an exact kNN search, use a `script_score` query with a vector function. + +. Explicitly map one or more `dense_vector` fields. If you don't intend to use +the field for approximate kNN, set the `index` mapping option to `false`. This +can significantly improve indexing speed. ++ +[source,console] +---- +PUT product-index +{ + "mappings": { + "properties": { + "product-vector": { + "type": "dense_vector", + "dims": 5, + "index": false + }, + "price": { + "type": "long" + } + } + } +} +---- + +. Index your data. ++ +[source,console] +---- +POST product-index/_bulk?refresh=true +{ "index": { "_id": "1" } } +{ "product-vector": [230.0, 300.33, -34.8988, 15.555, -200.0], "price": 1599 } +{ "index": { "_id": "2" } } +{ "product-vector": [-0.5, 100.0, -13.0, 14.8, -156.0], "price": 799 } +{ "index": { "_id": "3" } } +{ "product-vector": [0.5, 111.3, -13.0, 14.8, -156.0], "price": 1099 } +... +---- +//TEST[continued] +//TEST[s/\.\.\.//] + +. Use the <> to run a `script_score` query containing +a <>. ++ +TIP: To limit the number of matched documents passed to the vector function, we +recommend you specify a filter query in the `script_score.query` parameter. If +needed, you can use a <> in this +parameter to match all documents. However, matching all documents can +significantly increase search latency. 
++ +[source,console] +---- +POST product-index/_search +{ + "query": { + "script_score": { + "query" : { + "bool" : { + "filter" : { + "range" : { + "price" : { + "gte": 1000 + } + } + } + } + }, + "script": { + "source": "cosineSimilarity(params.queryVector, 'product-vector') + 1.0", + "params": { + "queryVector": [-0.5, 90.0, -10, 14.8, -156.0] + } + } + } + } +} +---- +//TEST[continued] diff --git a/docs/reference/search/search-your-data/retrievers-examples.asciidoc b/docs/reference/search/search-your-data/retrievers-examples.asciidoc index 5cada8960aeab..c0be7432aa179 100644 --- a/docs/reference/search/search-your-data/retrievers-examples.asciidoc +++ b/docs/reference/search/search-your-data/retrievers-examples.asciidoc @@ -8,18 +8,25 @@ Learn how to combine different retrievers in these hands-on examples. ==== Add example data To begin with, lets create the `retrievers_example` index, and add some documents to it. +We will set `number_of_shards=1` for our examples to ensure consistent and reproducible ordering. [source,console] ---- PUT retrievers_example { + "settings": { + "number_of_shards": 1 + }, "mappings": { "properties": { "vector": { "type": "dense_vector", "dims": 3, "similarity": "l2_norm", - "index": true + "index": true, + "index_options": { + "type": "flat" + } }, "text": { "type": "text" @@ -458,6 +465,9 @@ and index a couple of documents. 
---- PUT retrievers_example_nested { + "settings": { + "number_of_shards": 1 + }, "mappings": { "properties": { "nested_field": { @@ -470,7 +480,10 @@ PUT retrievers_example_nested "type": "dense_vector", "dims": 3, "similarity": "l2_norm", - "index": true + "index": true, + "index_options": { + "type": "flat" + } } } }, @@ -639,7 +652,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "value": 3, "relation": "eq" }, - "max_score": 0.44353113, + "max_score": 0.44444445, "hits": [ { "_index": "retrievers_example_nested", @@ -648,7 +661,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "field": "nested_field", "offset": 2 }, - "_score": 0.44353113, + "_score": 0.44444445, "fields": { "nested_field": [ { @@ -666,7 +679,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "field": "nested_field", "offset": 1 }, - "_score": 0.26567122, + "_score": 0.21301977, "fields": { "nested_field": [ { @@ -684,7 +697,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "field": "nested_field", "offset": 0 }, - "_score": 0.18478848, + "_score": 0.16889325, "fields": { "nested_field": [ { @@ -716,7 +729,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "value": 1, "relation": "eq" }, - "max_score": 0.32002488, + "max_score": 0.31715825, "hits": [ { "_index": "retrievers_example_nested", @@ -725,7 +738,7 @@ This would propagate the `inner_hits` defined for the `knn` query to the `rrf` r "field": "nested_field", "offset": 0 }, - "_score": 0.32002488, + "_score": 0.31715825, "fields": { "nested_field": [ { diff --git a/docs/reference/search/search.asciidoc b/docs/reference/search/search.asciidoc index d022605db22b1..f31f0c1ade023 100644 --- a/docs/reference/search/search.asciidoc +++ b/docs/reference/search/search.asciidoc @@ -534,6 +534,8 @@ not both. Refer to <> to learn more. 
(Optional, float) include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-similarity] +include::{es-ref-dir}/rest-api/common-parms.asciidoc[tag=knn-rescore-vector] + ==== [[search-api-min-score]] diff --git a/libs/entitlement/bridge/src/main/java/module-info.java b/libs/entitlement/bridge/src/main/java/module-info.java index 93292109a726e..b9055ec5fbf67 100644 --- a/libs/entitlement/bridge/src/main/java/module-info.java +++ b/libs/entitlement/bridge/src/main/java/module-info.java @@ -10,5 +10,7 @@ // This module-info is used just to satisfy your IDE. // At build and run time, the bridge is patched into the java.base module. module org.elasticsearch.entitlement.bridge { + requires java.net.http; + exports org.elasticsearch.entitlement.bridge; } diff --git a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java index 1e03c61df98e4..4efc81375531b 100644 --- a/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java +++ b/libs/entitlement/bridge/src/main/java/org/elasticsearch/entitlement/bridge/EntitlementChecker.java @@ -30,7 +30,19 @@ import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.ByteBuffer; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; +import java.nio.channels.DatagramChannel; +import java.nio.channels.ServerSocketChannel; +import java.nio.channels.SocketChannel; +import java.security.cert.CertStoreParameters; import java.util.List; +import java.util.Properties; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -110,6 +122,15 @@ public interface EntitlementChecker 
{ void check$java_lang_ProcessBuilder$$startPipeline(Class callerClass, List builders); + //////////////////// + // + // System Properties and similar + // + + void check$java_lang_System$$setProperty(Class callerClass, String key, String value); + + void check$java_lang_System$$clearProperty(Class callerClass, String key); + //////////////////// // // JVM-wide state changes @@ -121,6 +142,8 @@ public interface EntitlementChecker { void check$java_lang_System$$setErr(Class callerClass, PrintStream err); + void check$java_lang_System$$setProperties(Class callerClass, Properties props); + void check$java_lang_Runtime$addShutdownHook(Class callerClass, Runtime runtime, Thread hook); void check$java_lang_Runtime$removeShutdownHook(Class callerClass, Runtime runtime, Thread hook); @@ -254,4 +277,125 @@ public interface EntitlementChecker { void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint); void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint, int backlog); + + // Network miscellanea + void check$java_net_URL$openConnection(Class callerClass, java.net.URL that, Proxy proxy); + + // HttpClient#send and sendAsync are abstract, so we instrument their internal implementations + void check$jdk_internal_net_http_HttpClientImpl$send( + Class callerClass, + HttpClient that, + HttpRequest request, + HttpResponse.BodyHandler responseBodyHandler + ); + + void check$jdk_internal_net_http_HttpClientImpl$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler + ); + + void check$jdk_internal_net_http_HttpClientImpl$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler, + HttpResponse.PushPromiseHandler pushPromiseHandler + ); + + void check$jdk_internal_net_http_HttpClientFacade$send( + Class callerClass, + HttpClient that, + HttpRequest request, + HttpResponse.BodyHandler 
responseBodyHandler + ); + + void check$jdk_internal_net_http_HttpClientFacade$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler + ); + + void check$jdk_internal_net_http_HttpClientFacade$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler, + HttpResponse.PushPromiseHandler pushPromiseHandler + ); + + // We need to check the LDAPCertStore, as this will connect, but this is internal/created via SPI, + // so we instrument the general factory instead and then filter in the check method implementation + void check$java_security_cert_CertStore$$getInstance(Class callerClass, String type, CertStoreParameters params); + + /* NIO + * For NIO, we are sometimes able to check a method on the public surface/interface (e.g. AsynchronousServerSocketChannel#bind) + * but most of the time these methods are abstract in the public classes/interfaces (e.g. ServerSocketChannel#accept, + * NetworkChannel#bind), so we are forced to instrument the "impl" classes. + * You can distinguish the 2 cases from the namespaces: java_nio_channels for the public ones, sun_nio_ch for the implementation + * classes. When you see a check on a sun_nio_ch class/method, this means the matching method on the public class is abstract + * (not instrumentable). 
+ */ + + // bind + + void check$java_nio_channels_AsynchronousServerSocketChannel$bind( + Class callerClass, + AsynchronousServerSocketChannel that, + SocketAddress local + ); + + void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$bind( + Class callerClass, + AsynchronousServerSocketChannel that, + SocketAddress local, + int backlog + ); + + void check$sun_nio_ch_AsynchronousSocketChannelImpl$bind(Class callerClass, AsynchronousSocketChannel that, SocketAddress local); + + void check$sun_nio_ch_DatagramChannelImpl$bind(Class callerClass, DatagramChannel that, SocketAddress local); + + void check$java_nio_channels_ServerSocketChannel$bind(Class callerClass, ServerSocketChannel that, SocketAddress local); + + void check$sun_nio_ch_ServerSocketChannelImpl$bind(Class callerClass, ServerSocketChannel that, SocketAddress local, int backlog); + + void check$sun_nio_ch_SocketChannelImpl$bind(Class callerClass, SocketChannel that, SocketAddress local); + + // connect + + void check$sun_nio_ch_SocketChannelImpl$connect(Class callerClass, SocketChannel that, SocketAddress remote); + + void check$sun_nio_ch_AsynchronousSocketChannelImpl$connect(Class callerClass, AsynchronousSocketChannel that, SocketAddress remote); + + void check$sun_nio_ch_AsynchronousSocketChannelImpl$connect( + Class callerClass, + AsynchronousSocketChannel that, + SocketAddress remote, + Object attachment, + CompletionHandler handler + ); + + void check$sun_nio_ch_DatagramChannelImpl$connect(Class callerClass, DatagramChannel that, SocketAddress remote); + + // accept + + void check$sun_nio_ch_ServerSocketChannelImpl$accept(Class callerClass, ServerSocketChannel that); + + void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$accept(Class callerClass, AsynchronousServerSocketChannel that); + + void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$accept( + Class callerClass, + AsynchronousServerSocketChannel that, + Object attachment, + CompletionHandler handler + ); + + // send/receive + 
+ void check$sun_nio_ch_DatagramChannelImpl$send(Class callerClass, DatagramChannel that, ByteBuffer src, SocketAddress target); + + void check$sun_nio_ch_DatagramChannelImpl$receive(Class callerClass, DatagramChannel that, ByteBuffer dst); } diff --git a/libs/entitlement/qa/common/src/main/java/module-info.java b/libs/entitlement/qa/common/src/main/java/module-info.java index 211b7041e97ea..c40240f3dc1d5 100644 --- a/libs/entitlement/qa/common/src/main/java/module-info.java +++ b/libs/entitlement/qa/common/src/main/java/module-info.java @@ -14,6 +14,7 @@ // Modules we'll attempt to use in order to exercise entitlements requires java.logging; + requires java.net.http; exports org.elasticsearch.entitlement.qa.common; } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java index c88d4ce2b11a9..49cf586ea1285 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/NetworkAccessCheckActions.java @@ -17,7 +17,24 @@ import java.net.Proxy; import java.net.ServerSocket; import java.net.Socket; +import java.net.SocketException; +import java.net.URI; +import java.net.URISyntaxException; +import java.nio.ByteBuffer; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; +import java.nio.channels.DatagramChannel; +import java.nio.channels.NotYetBoundException; +import java.nio.channels.ServerSocketChannel; +import java.nio.channels.SocketChannel; +import java.security.InvalidAlgorithmParameterException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertStore; +import java.util.Arrays; +import 
java.util.concurrent.ExecutionException; +@SuppressForbidden(reason = "Testing entitlement check on forbidden action") class NetworkAccessCheckActions { static void serverSocketAccept() throws IOException { @@ -40,7 +57,6 @@ static void serverSocketBind() throws IOException { } } - @SuppressForbidden(reason = "Testing entitlement check on forbidden action") static void createSocketWithProxy() throws IOException { try (Socket socket = new Socket(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(0)))) { assert socket.isBound() == false; @@ -53,10 +69,175 @@ static void socketBind() throws IOException { } } - @SuppressForbidden(reason = "Testing entitlement check on forbidden action") static void socketConnect() throws IOException { try (Socket socket = new DummyImplementations.DummySocket()) { socket.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); } } + + static void urlOpenConnectionWithProxy() throws URISyntaxException, IOException { + var url = new URI("http://localhost").toURL(); + var urlConnection = url.openConnection(new Proxy(Proxy.Type.HTTP, new InetSocketAddress(0))); + assert urlConnection != null; + } + + static void createLDAPCertStore() throws NoSuchAlgorithmException { + try { + // We pass down null params to provoke a InvalidAlgorithmParameterException + CertStore.getInstance("LDAP", null); + } catch (InvalidAlgorithmParameterException ex) { + // Assert we actually hit the class we care about, LDAPCertStore (or its impl) + assert Arrays.stream(ex.getStackTrace()).anyMatch(e -> e.getClassName().endsWith("LDAPCertStore")); + } + } + + static void serverSocketChannelBind() throws IOException { + try (var serverSocketChannel = ServerSocketChannel.open()) { + serverSocketChannel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + static void serverSocketChannelBindWithBacklog() throws IOException { + try (var serverSocketChannel = ServerSocketChannel.open()) { + serverSocketChannel.bind(new 
InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 50); + } + } + + static void serverSocketChannelAccept() throws IOException { + try (var serverSocketChannel = ServerSocketChannel.open()) { + serverSocketChannel.configureBlocking(false); + try { + serverSocketChannel.accept(); + } catch (NotYetBoundException e) { + // It's OK, we did not call bind on the socket on purpose so we can just test "accept" + // "accept" will be called and exercise the Entitlement check, we don't care if it fails afterward for this known reason. + } + } + } + + static void asynchronousServerSocketChannelBind() throws IOException { + try (var serverSocketChannel = AsynchronousServerSocketChannel.open()) { + serverSocketChannel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + static void asynchronousServerSocketChannelBindWithBacklog() throws IOException { + try (var serverSocketChannel = AsynchronousServerSocketChannel.open()) { + serverSocketChannel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), 50); + } + } + + static void asynchronousServerSocketChannelAccept() throws IOException { + try (var serverSocketChannel = AsynchronousServerSocketChannel.open()) { + try { + var future = serverSocketChannel.accept(); + future.cancel(true); + } catch (NotYetBoundException e) { + // It's OK, we did not call bind on the socket on purpose so we can just test "accept" + // "accept" will be called and exercise the Entitlement check, we don't care if it fails afterward for this known reason. 
+ } + } + } + + static void asynchronousServerSocketChannelAcceptWithHandler() throws IOException { + try (var serverSocketChannel = AsynchronousServerSocketChannel.open()) { + try { + serverSocketChannel.accept(null, new CompletionHandler<>() { + @Override + public void completed(AsynchronousSocketChannel result, Object attachment) {} + + @Override + public void failed(Throwable exc, Object attachment) { + assert exc.getClass().getSimpleName().equals("NotEntitledException") == false; + } + }); + } catch (NotYetBoundException e) { + // It's OK, we did not call bind on the socket on purpose so we can just test "accept" + // "accept" will be called and exercise the Entitlement check, we don't care if it fails afterward for this known reason. + } + } + } + + static void socketChannelBind() throws IOException { + try (var socketChannel = SocketChannel.open()) { + socketChannel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + static void socketChannelConnect() throws IOException { + try (var socketChannel = SocketChannel.open()) { + try { + socketChannel.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } catch (SocketException e) { + // We expect to fail, not a valid address to connect to. + // "connect" will be called and exercise the Entitlement check, we don't care if it fails afterward for this known reason. 
+ } + } + } + + static void asynchronousSocketChannelBind() throws IOException { + try (var socketChannel = AsynchronousSocketChannel.open()) { + socketChannel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + static void asynchronousSocketChannelConnect() throws IOException, InterruptedException { + try (var socketChannel = AsynchronousSocketChannel.open()) { + var future = socketChannel.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + try { + future.get(); + } catch (ExecutionException e) { + assert e.getCause().getClass().getSimpleName().equals("NotEntitledException") == false; + } finally { + future.cancel(true); + } + } + } + + static void asynchronousSocketChannelConnectWithCompletion() throws IOException { + try (var socketChannel = AsynchronousSocketChannel.open()) { + socketChannel.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0), null, new CompletionHandler<>() { + @Override + public void completed(Void result, Object attachment) {} + + @Override + public void failed(Throwable exc, Object attachment) { + assert exc.getClass().getSimpleName().equals("NotEntitledException") == false; + } + }); + } + } + + static void datagramChannelBind() throws IOException { + try (var channel = DatagramChannel.open()) { + channel.bind(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } + } + + static void datagramChannelConnect() throws IOException { + try (var channel = DatagramChannel.open()) { + channel.configureBlocking(false); + try { + channel.connect(new InetSocketAddress(InetAddress.getLoopbackAddress(), 0)); + } catch (SocketException e) { + // We expect to fail, not a valid address to connect to. + // "connect" will be called and exercise the Entitlement check, we don't care if it fails afterward for this known reason. 
+ } + } + } + + static void datagramChannelSend() throws IOException { + try (var channel = DatagramChannel.open()) { + channel.configureBlocking(false); + channel.send(ByteBuffer.wrap(new byte[] { 0 }), new InetSocketAddress(InetAddress.getLoopbackAddress(), 1234)); + } + } + + static void datagramChannelReceive() throws IOException { + try (var channel = DatagramChannel.open()) { + channel.configureBlocking(false); + var buffer = new byte[1]; + channel.receive(ByteBuffer.wrap(buffer)); + } + } } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java index 9e7e6e33f3eda..dc619594e9e12 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/RestEntitlementsCheckAction.java @@ -90,7 +90,7 @@ static CheckAction alwaysDenied(CheckedRunnable action) { } } - private static final Map checkActions = Stream.of( + private static final Map checkActions = Stream.>of( entry("runtime_exit", deniedToPlugins(RestEntitlementsCheckAction::runtimeExit)), entry("runtime_halt", deniedToPlugins(RestEntitlementsCheckAction::runtimeHalt)), entry("system_exit", deniedToPlugins(RestEntitlementsCheckAction::systemExit)), @@ -124,6 +124,10 @@ static CheckAction alwaysDenied(CheckedRunnable action) { entry("timeZoneNameProvider", alwaysDenied(RestEntitlementsCheckAction::timeZoneNameProvider$)), entry("logManager", alwaysDenied(RestEntitlementsCheckAction::logManager$)), + entry("system_setProperty", forPlugins(WritePropertiesCheckActions::setSystemProperty)), + entry("system_clearProperty", forPlugins(WritePropertiesCheckActions::clearSystemProperty)), + entry("system_setSystemProperties", 
alwaysDenied(WritePropertiesCheckActions::setSystemProperties)), + // This group is a bit nasty: if entitlements don't prevent these, then networking is // irreparably borked for the remainder of the test run. entry( @@ -157,7 +161,38 @@ static CheckAction alwaysDenied(CheckedRunnable action) { entry("socket_bind", forPlugins(NetworkAccessCheckActions::socketBind)), entry("socket_connect", forPlugins(NetworkAccessCheckActions::socketConnect)), entry("server_socket_bind", forPlugins(NetworkAccessCheckActions::serverSocketBind)), - entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)) + entry("server_socket_accept", forPlugins(NetworkAccessCheckActions::serverSocketAccept)), + + entry("url_open_connection_proxy", forPlugins(NetworkAccessCheckActions::urlOpenConnectionWithProxy)), + entry("http_client_send", forPlugins(VersionSpecificNetworkChecks::httpClientSend)), + entry("http_client_send_async", forPlugins(VersionSpecificNetworkChecks::httpClientSendAsync)), + entry("create_ldap_cert_store", forPlugins(NetworkAccessCheckActions::createLDAPCertStore)), + + entry("server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::serverSocketChannelBind)), + entry("server_socket_channel_bind_backlog", forPlugins(NetworkAccessCheckActions::serverSocketChannelBindWithBacklog)), + entry("server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::serverSocketChannelAccept)), + entry("asynchronous_server_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBind)), + entry( + "asynchronous_server_socket_channel_bind_backlog", + forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelBindWithBacklog) + ), + entry("asynchronous_server_socket_channel_accept", forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAccept)), + entry( + "asynchronous_server_socket_channel_accept_with_handler", + 
forPlugins(NetworkAccessCheckActions::asynchronousServerSocketChannelAcceptWithHandler) + ), + entry("socket_channel_bind", forPlugins(NetworkAccessCheckActions::socketChannelBind)), + entry("socket_channel_connect", forPlugins(NetworkAccessCheckActions::socketChannelConnect)), + entry("asynchronous_socket_channel_bind", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelBind)), + entry("asynchronous_socket_channel_connect", forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnect)), + entry( + "asynchronous_socket_channel_connect_with_completion", + forPlugins(NetworkAccessCheckActions::asynchronousSocketChannelConnectWithCompletion) + ), + entry("datagram_channel_bind", forPlugins(NetworkAccessCheckActions::datagramChannelBind)), + entry("datagram_channel_connect", forPlugins(NetworkAccessCheckActions::datagramChannelConnect)), + entry("datagram_channel_send", forPlugins(NetworkAccessCheckActions::datagramChannelSend)), + entry("datagram_channel_receive", forPlugins(NetworkAccessCheckActions::datagramChannelReceive)) ) .filter(entry -> entry.getValue().fromJavaVersion() == null || Runtime.version().feature() >= entry.getValue().fromJavaVersion()) .collect(Collectors.toUnmodifiableMap(Map.Entry::getKey, Map.Entry::getValue)); diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java index e1e0b9e52f510..df7777b6614aa 100644 --- a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java @@ -9,6 +9,26 @@ package org.elasticsearch.entitlement.qa.common; +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import 
java.net.http.HttpResponse; + class VersionSpecificNetworkChecks { static void createInetAddressResolverProvider() {} + + static void httpClientSend() throws InterruptedException { + HttpClient httpClient = HttpClient.newBuilder().build(); + try { + httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } catch (IOException e) { + // Expected, the send action may fail with these parameters (but after it runs the entitlement check in the prologue) + } + } + + static void httpClientSendAsync() { + HttpClient httpClient = HttpClient.newBuilder().build(); + httpClient.sendAsync(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } } diff --git a/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/WritePropertiesCheckActions.java b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/WritePropertiesCheckActions.java new file mode 100644 index 0000000000000..8e5173796458d --- /dev/null +++ b/libs/entitlement/qa/common/src/main/java/org/elasticsearch/entitlement/qa/common/WritePropertiesCheckActions.java @@ -0,0 +1,35 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.common; + +import org.elasticsearch.core.SuppressForbidden; + +@SuppressForbidden(reason = "testing entitlements") +class WritePropertiesCheckActions { + private WritePropertiesCheckActions() {} + + static void setSystemProperty() { + System.setProperty("es.entitlements.checkSetSystemProperty", "true"); + try { + System.clearProperty("es.entitlements.checkSetSystemProperty"); + } catch (RuntimeException e) { + // ignore for this test case + } + + } + + static void clearSystemProperty() { + System.clearProperty("es.entitlements.checkClearSystemProperty"); + } + + static void setSystemProperties() { + System.setProperties(System.getProperties()); // no side effect in case it is allowed (but it shouldn't be) + } +} diff --git a/libs/entitlement/qa/common/src/main18/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/common/src/main18/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java index 0ead32ec480ee..6229b7f8e6cfc 100644 --- a/libs/entitlement/qa/common/src/main18/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java +++ b/libs/entitlement/qa/common/src/main18/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java @@ -9,6 +9,11 @@ package org.elasticsearch.entitlement.qa.common; +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; import java.net.spi.InetAddressResolver; import java.net.spi.InetAddressResolverProvider; @@ -26,4 +31,18 @@ public String name() { } }; } + + static void httpClientSend() throws InterruptedException { + HttpClient httpClient = HttpClient.newBuilder().build(); + try { + httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } catch (IOException e) { + // Expected, the send action may fail with these 
parameters (but after it runs the entitlement check in the prologue) + } + } + + static void httpClientSendAsync() { + HttpClient httpClient = HttpClient.newBuilder().build(); + httpClient.sendAsync(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } } diff --git a/libs/entitlement/qa/common/src/main21/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java b/libs/entitlement/qa/common/src/main21/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java new file mode 100644 index 0000000000000..8dcee7e7603de --- /dev/null +++ b/libs/entitlement/qa/common/src/main21/java/org/elasticsearch/entitlement/qa/common/VersionSpecificNetworkChecks.java @@ -0,0 +1,64 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.qa.common; + +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.net.spi.InetAddressResolver; +import java.net.spi.InetAddressResolverProvider; + +class VersionSpecificNetworkChecks { + static void createInetAddressResolverProvider() { + var x = new InetAddressResolverProvider() { + @Override + public InetAddressResolver get(Configuration configuration) { + return null; + } + + @Override + public String name() { + return "TEST"; + } + }; + } + + static void httpClientSend() throws InterruptedException { + try (HttpClient httpClient = HttpClient.newBuilder().build()) { + // Shut down the client, so the send action will shortcut before actually executing any network operation + // (but after it runs our check in the prologue) + httpClient.shutdown(); + try { + httpClient.send(HttpRequest.newBuilder(URI.create("http://localhost")).build(), HttpResponse.BodyHandlers.discarding()); + } catch (IOException e) { + // Expected, since we shut down the client + } + } + } + + static void httpClientSendAsync() { + try (HttpClient httpClient = HttpClient.newBuilder().build()) { + // Shut down the client, so the send action will return before actually executing any network operation + // (but after it runs our check in the prologue) + httpClient.shutdown(); + var future = httpClient.sendAsync( + HttpRequest.newBuilder(URI.create("http://localhost")).build(), + HttpResponse.BodyHandlers.discarding() + ); + assert future.isCompletedExceptionally(); + future.exceptionally(ex -> { + assert ex instanceof IOException; + return null; + }); + } + } +} diff --git a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml index 05a94f09264a8..e66c76a8f93c3 --- 
a/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml +++ b/libs/entitlement/qa/entitlement-allowed-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,8 +1,9 @@ ALL-UNNAMED: - create_class_loader - set_https_connection_properties - - network: - actions: - - listen - - accept - - connect + - inbound_network + - outbound_network + - write_system_properties: + properties: + - es.entitlements.checkSetSystemProperty + - es.entitlements.checkClearSystemProperty diff --git a/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml index 0d2c66c2daa2c..a41781c56c805 100644 --- a/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml +++ b/libs/entitlement/qa/entitlement-allowed/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,8 +1,9 @@ org.elasticsearch.entitlement.qa.common: - create_class_loader - set_https_connection_properties - - network: - actions: - - listen - - accept - - connect + - inbound_network + - outbound_network + - write_system_properties: + properties: + - es.entitlements.checkSetSystemProperty + - es.entitlements.checkClearSystemProperty diff --git a/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml b/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..d3a32baf3636f --- /dev/null +++ b/libs/entitlement/qa/entitlement-denied-nonmodular/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +ALL-UNNAMED: + - write_system_properties: + properties: + # entitlement itself not sufficient, also no wildcard support + - "*" diff --git a/libs/entitlement/qa/entitlement-denied/src/main/plugin-metadata/entitlement-policy.yaml 
b/libs/entitlement/qa/entitlement-denied/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..7b2253fd2e08a --- /dev/null +++ b/libs/entitlement/qa/entitlement-denied/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +org.elasticsearch.entitlement.qa.common: + - write_system_properties: + properties: + # entitlement itself not sufficient, also no wildcard support + - "*" diff --git a/libs/entitlement/src/main/java/module-info.java b/libs/entitlement/src/main/java/module-info.java index b8a125b98e641..c0959f212558a 100644 --- a/libs/entitlement/src/main/java/module-info.java +++ b/libs/entitlement/src/main/java/module-info.java @@ -13,6 +13,7 @@ requires java.instrument; requires org.elasticsearch.base; requires jdk.attach; + requires java.net.http; requires static org.elasticsearch.entitlement.bridge; // At runtime, this will be in java.base diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 9b621461403d1..0ad8fc350026c 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -22,7 +22,8 @@ import org.elasticsearch.entitlement.runtime.policy.CreateClassLoaderEntitlement; import org.elasticsearch.entitlement.runtime.policy.Entitlement; import org.elasticsearch.entitlement.runtime.policy.ExitVMEntitlement; -import org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.InboundNetworkEntitlement; +import org.elasticsearch.entitlement.runtime.policy.OutboundNetworkEntitlement; import org.elasticsearch.entitlement.runtime.policy.Policy; import 
org.elasticsearch.entitlement.runtime.policy.PolicyManager; import org.elasticsearch.entitlement.runtime.policy.PolicyParser; @@ -45,9 +46,6 @@ import java.util.Set; import java.util.stream.Collectors; -import static org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.ACCEPT_ACTION; -import static org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.CONNECT_ACTION; -import static org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement.LISTEN_ACTION; import static org.elasticsearch.entitlement.runtime.policy.PolicyManager.ALL_UNNAMED; /** @@ -106,10 +104,12 @@ private static PolicyManager createPolicyManager() throws IOException { List.of( new ExitVMEntitlement(), new CreateClassLoaderEntitlement(), - new NetworkEntitlement(LISTEN_ACTION | CONNECT_ACTION | ACCEPT_ACTION) + new InboundNetworkEntitlement(), + new OutboundNetworkEntitlement() ) ), - new Scope("org.apache.httpcomponents.httpclient", List.of(new NetworkEntitlement(CONNECT_ACTION))) + new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), + new Scope("io.netty.transport", List.of(new InboundNetworkEntitlement(), new OutboundNetworkEntitlement())) ) ); // agents run without a module, so this is a special hack for the apm agent diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java index 695d1c574c7c3..69daca8bfbd56 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/api/ElasticsearchEntitlementChecker.java @@ -10,7 +10,6 @@ package org.elasticsearch.entitlement.runtime.api; import org.elasticsearch.entitlement.bridge.EntitlementChecker; -import org.elasticsearch.entitlement.runtime.policy.NetworkEntitlement; import 
org.elasticsearch.entitlement.runtime.policy.PolicyManager; import java.io.InputStream; @@ -22,6 +21,7 @@ import java.net.DatagramSocketImplFactory; import java.net.FileNameMap; import java.net.InetAddress; +import java.net.InetSocketAddress; import java.net.MulticastSocket; import java.net.NetworkInterface; import java.net.Proxy; @@ -34,7 +34,19 @@ import java.net.URL; import java.net.URLStreamHandler; import java.net.URLStreamHandlerFactory; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.ByteBuffer; +import java.nio.channels.AsynchronousServerSocketChannel; +import java.nio.channels.AsynchronousSocketChannel; +import java.nio.channels.CompletionHandler; +import java.nio.channels.DatagramChannel; +import java.nio.channels.ServerSocketChannel; +import java.nio.channels.SocketChannel; +import java.security.cert.CertStoreParameters; import java.util.List; +import java.util.Properties; import javax.net.ssl.HostnameVerifier; import javax.net.ssl.HttpsURLConnection; @@ -200,6 +212,21 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { policyManager.checkChangeJVMGlobalState(callerClass); } + @Override + public void check$java_lang_System$$clearProperty(Class callerClass, String key) { + policyManager.checkWriteProperty(callerClass, key); + } + + @Override + public void check$java_lang_System$$setProperty(Class callerClass, String key, String value) { + policyManager.checkWriteProperty(callerClass, key); + } + + @Override + public void check$java_lang_System$$setProperties(Class callerClass, Properties props) { + policyManager.checkChangeJVMGlobalState(callerClass); + } + @Override public void check$java_util_spi_LocaleServiceProvider$(Class callerClass) { policyManager.checkChangeJVMGlobalState(callerClass); @@ -356,152 +383,353 @@ public ElasticsearchEntitlementChecker(PolicyManager policyManager) { @Override public void check$java_net_DatagramSocket$bind(Class callerClass, 
DatagramSocket that, SocketAddress addr) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, InetAddress addr) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(callerClass); } @Override public void check$java_net_DatagramSocket$connect(Class callerClass, DatagramSocket that, SocketAddress addr) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(callerClass); } @Override public void check$java_net_DatagramSocket$send(Class callerClass, DatagramSocket that, DatagramPacket p) { - var actions = NetworkEntitlement.CONNECT_ACTION; if (p.getAddress().isMulticastAddress()) { - actions |= NetworkEntitlement.ACCEPT_ACTION; + policyManager.checkAllNetworkAccess(callerClass); + } else { + policyManager.checkOutboundNetworkAccess(callerClass); } - policyManager.checkNetworkAccess(callerClass, actions); } @Override public void check$java_net_DatagramSocket$receive(Class callerClass, DatagramSocket that, DatagramPacket p) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_DatagramSocket$joinGroup(Class caller, DatagramSocket that, SocketAddress addr, NetworkInterface ni) { - policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(caller); } @Override public void check$java_net_DatagramSocket$leaveGroup(Class caller, DatagramSocket that, SocketAddress addr, NetworkInterface ni) { - policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | 
NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(caller); } @Override - public void check$java_net_MulticastSocket$joinGroup(Class callerClass, MulticastSocket that, InetAddress addr) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + public void check$java_net_MulticastSocket$joinGroup(Class caller, MulticastSocket that, InetAddress addr) { + policyManager.checkAllNetworkAccess(caller); } @Override public void check$java_net_MulticastSocket$joinGroup(Class caller, MulticastSocket that, SocketAddress addr, NetworkInterface ni) { - policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(caller); } @Override public void check$java_net_MulticastSocket$leaveGroup(Class caller, MulticastSocket that, InetAddress addr) { - policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(caller); } @Override public void check$java_net_MulticastSocket$leaveGroup(Class caller, MulticastSocket that, SocketAddress addr, NetworkInterface ni) { - policyManager.checkNetworkAccess(caller, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(caller); } @Override public void check$java_net_MulticastSocket$send(Class callerClass, MulticastSocket that, DatagramPacket p, byte ttl) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkAllNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$(Class callerClass, int port) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$(Class callerClass, int port, int 
backlog) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$(Class callerClass, int port, int backlog, InetAddress bindAddr) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$accept(Class callerClass, ServerSocket that) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$implAccept(Class callerClass, ServerSocket that, Socket s) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.ACCEPT_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, SocketAddress endpoint) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_ServerSocket$bind(Class callerClass, ServerSocket that, SocketAddress endpoint, int backlog) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkInboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$(Class callerClass, Proxy proxy) { if (proxy.type() == Proxy.Type.SOCKS || proxy.type() == Proxy.Type.HTTP) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } } @Override public void check$java_net_Socket$(Class callerClass, String host, int port) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } 
@Override public void check$java_net_Socket$(Class callerClass, InetAddress address, int port) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$(Class callerClass, String host, int port, InetAddress localAddr, int localPort) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$(Class callerClass, InetAddress address, int port, InetAddress localAddr, int localPort) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$(Class callerClass, String host, int port, boolean stream) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$(Class callerClass, InetAddress host, int port, boolean stream) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$bind(Class callerClass, Socket that, SocketAddress endpoint) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.LISTEN_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); } @Override public void 
check$java_net_Socket$connect(Class callerClass, Socket that, SocketAddress endpoint, int backlog) { - policyManager.checkNetworkAccess(callerClass, NetworkEntitlement.CONNECT_ACTION); + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$java_net_URL$openConnection(Class callerClass, URL that, Proxy proxy) { + if (proxy.type() != Proxy.Type.DIRECT) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + } + + @Override + public void check$jdk_internal_net_http_HttpClientImpl$send( + Class callerClass, + HttpClient that, + HttpRequest request, + HttpResponse.BodyHandler responseBodyHandler + ) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$jdk_internal_net_http_HttpClientImpl$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler + ) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$jdk_internal_net_http_HttpClientImpl$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler, + HttpResponse.PushPromiseHandler pushPromiseHandler + ) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$jdk_internal_net_http_HttpClientFacade$send( + Class callerClass, + HttpClient that, + HttpRequest request, + HttpResponse.BodyHandler responseBodyHandler + ) { + check$jdk_internal_net_http_HttpClientImpl$send(callerClass, that, request, responseBodyHandler); + } + + @Override + public void check$jdk_internal_net_http_HttpClientFacade$sendAsync( + Class callerClass, + HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler + ) { + check$jdk_internal_net_http_HttpClientImpl$sendAsync(callerClass, that, userRequest, responseHandler); + } + + @Override + public void check$jdk_internal_net_http_HttpClientFacade$sendAsync( + Class callerClass, 
+ HttpClient that, + HttpRequest userRequest, + HttpResponse.BodyHandler responseHandler, + HttpResponse.PushPromiseHandler pushPromiseHandler + ) { + check$jdk_internal_net_http_HttpClientImpl$sendAsync(callerClass, that, userRequest, responseHandler, pushPromiseHandler); + } + + @Override + public void check$java_security_cert_CertStore$$getInstance(Class callerClass, String type, CertStoreParameters params) { + // We need to check "just" the LDAPCertStore instantiation: this is the CertStore that will try to perform a network operation + // (connect to an LDAP server). But LDAPCertStore is internal (created via SPI), so we instrument the general factory instead and + // then do the check only for the path that leads to sensitive code (by looking at the `type` parameter). + if ("LDAP".equals(type)) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + } + + @Override + public void check$java_nio_channels_AsynchronousServerSocketChannel$bind( + Class callerClass, + AsynchronousServerSocketChannel that, + SocketAddress local + ) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$bind( + Class callerClass, + AsynchronousServerSocketChannel that, + SocketAddress local, + int backlog + ) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousSocketChannelImpl$bind( + Class callerClass, + AsynchronousSocketChannel that, + SocketAddress local + ) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_DatagramChannelImpl$bind(Class callerClass, DatagramChannel that, SocketAddress local) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$java_nio_channels_ServerSocketChannel$bind(Class callerClass, ServerSocketChannel that, SocketAddress local) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + 
@Override + public void check$sun_nio_ch_ServerSocketChannelImpl$bind( + Class callerClass, + ServerSocketChannel that, + SocketAddress local, + int backlog + ) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_SocketChannelImpl$bind(Class callerClass, SocketChannel that, SocketAddress local) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_SocketChannelImpl$connect(Class callerClass, SocketChannel that, SocketAddress remote) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousSocketChannelImpl$connect( + Class callerClass, + AsynchronousSocketChannel that, + SocketAddress remote + ) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousSocketChannelImpl$connect( + Class callerClass, + AsynchronousSocketChannel that, + SocketAddress remote, + Object attachment, + CompletionHandler handler + ) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_DatagramChannelImpl$connect(Class callerClass, DatagramChannel that, SocketAddress remote) { + policyManager.checkOutboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_ServerSocketChannelImpl$accept(Class callerClass, ServerSocketChannel that) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$accept(Class callerClass, AsynchronousServerSocketChannel that) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void check$sun_nio_ch_AsynchronousServerSocketChannelImpl$accept( + Class callerClass, + AsynchronousServerSocketChannel that, + Object attachment, + CompletionHandler handler + ) { + policyManager.checkInboundNetworkAccess(callerClass); + } + + @Override + public void 
check$sun_nio_ch_DatagramChannelImpl$send( + Class callerClass, + DatagramChannel that, + ByteBuffer src, + SocketAddress target + ) { + if (target instanceof InetSocketAddress isa && isa.getAddress().isMulticastAddress()) { + policyManager.checkAllNetworkAccess(callerClass); + } else { + policyManager.checkOutboundNetworkAccess(callerClass); + } + } + + @Override + public void check$sun_nio_ch_DatagramChannelImpl$receive(Class callerClass, DatagramChannel that, ByteBuffer dst) { + policyManager.checkInboundNetworkAccess(callerClass); } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java new file mode 100644 index 0000000000000..482d4e5100c0b --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/InboundNetworkEntitlement.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * Describes an entitlement for inbound network actions (listen/accept/receive) + */ +public record InboundNetworkEntitlement() implements Entitlement { + @ExternalEntitlement + public InboundNetworkEntitlement {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java deleted file mode 100644 index 9b4035cee98d0..0000000000000 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlement.java +++ /dev/null @@ -1,111 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.entitlement.runtime.policy; - -import org.elasticsearch.core.Strings; - -import java.util.List; -import java.util.Map; -import java.util.Objects; -import java.util.StringJoiner; - -import static java.util.Map.entry; - -/** - * Describes a network entitlement (sockets) with actions. 
- */ -public class NetworkEntitlement implements Entitlement { - - public static final int LISTEN_ACTION = 0x1; - public static final int CONNECT_ACTION = 0x2; - public static final int ACCEPT_ACTION = 0x4; - - static final String LISTEN = "listen"; - static final String CONNECT = "connect"; - static final String ACCEPT = "accept"; - - private static final Map ACTION_MAP = Map.ofEntries( - entry(LISTEN, LISTEN_ACTION), - entry(CONNECT, CONNECT_ACTION), - entry(ACCEPT, ACCEPT_ACTION) - ); - - private final int actions; - - @ExternalEntitlement(parameterNames = { "actions" }, esModulesOnly = false) - public NetworkEntitlement(List actionsList) { - - int actionsInt = 0; - - for (String actionString : actionsList) { - var action = ACTION_MAP.get(actionString); - if (action == null) { - throw new IllegalArgumentException("unknown network action [" + actionString + "]"); - } - if ((actionsInt & action) == action) { - throw new IllegalArgumentException(Strings.format("network action [%s] specified multiple times", actionString)); - } - actionsInt |= action; - } - - this.actions = actionsInt; - } - - public NetworkEntitlement(int actions) { - this.actions = actions; - } - - public static String printActions(int actions) { - var joiner = new StringJoiner(","); - for (var entry : ACTION_MAP.entrySet()) { - var action = entry.getValue(); - if ((actions & action) == action) { - joiner.add(entry.getKey()); - } - } - return joiner.toString(); - } - - /** - * For the actions to match, the actions present in this entitlement must be a superset - * of the actions required by a check. 
- * There is only one "negative" case (action required by the check but not present in the entitlement), - * and it can be expressed efficiently via this truth table: - * this.actions | requiredActions | - * 0 | 0 | 0 - * 0 | 1 | 1 --> NOT this.action AND requiredActions - * 1 | 0 | 0 - * 1 | 1 | 0 - * - * @param requiredActions the actions required to be present for a check to pass - * @return true if requiredActions are present, false otherwise - */ - public boolean matchActions(int requiredActions) { - return (~this.actions & requiredActions) == 0; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - NetworkEntitlement that = (NetworkEntitlement) o; - return actions == that.actions; - } - - @Override - public int hashCode() { - return Objects.hash(actions); - } - - @Override - public String toString() { - return "NetworkEntitlement{actions=" + actions + '}'; - } -} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java new file mode 100644 index 0000000000000..50d9a47f580e5 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/OutboundNetworkEntitlement.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * Describes an entitlement for outbound network actions (connect/send) + */ +public record OutboundNetworkEntitlement() implements Entitlement { + @ExternalEntitlement(esModulesOnly = false) + public OutboundNetworkEntitlement {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index aeb54d5c1156c..bcc367d708465 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -114,8 +114,8 @@ public void checkStartProcess(Class callerClass) { } private void neverEntitled(Class callerClass, String operationDescription) { - var requestingModule = requestingClass(callerClass); - if (isTriviallyAllowed(requestingModule)) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { return; } @@ -123,7 +123,7 @@ private void neverEntitled(Class callerClass, String operationDescription) { Strings.format( "Not entitled: caller [%s], module [%s], operation [%s]", callerClass, - requestingModule.getName(), + requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), operationDescription ) ); @@ -134,8 +134,8 @@ private void neverEntitled(Class callerClass, String operationDescription) { * therefore, its performance is not a major concern. 
*/ private void neverEntitled(Class callerClass, Supplier operationDescription) { - var requestingModule = requestingClass(callerClass); - if (isTriviallyAllowed(requestingModule)) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { return; } @@ -143,7 +143,7 @@ private void neverEntitled(Class callerClass, Supplier operationDescr Strings.format( "Not entitled: caller [%s], module [%s], operation [%s]", callerClass, - requestingModule.getName(), + requestingClass.getModule() == null ? "" : requestingClass.getModule().getName(), operationDescription.get() ) ); @@ -194,30 +194,73 @@ private String operationDescription(String methodName) { return methodName.substring(methodName.indexOf('$')); } - public void checkNetworkAccess(Class callerClass, int actions) { + public void checkInboundNetworkAccess(Class callerClass) { + checkEntitlementPresent(callerClass, InboundNetworkEntitlement.class); + } + + public void checkOutboundNetworkAccess(Class callerClass) { + checkEntitlementPresent(callerClass, OutboundNetworkEntitlement.class); + } + + public void checkAllNetworkAccess(Class callerClass) { var requestingClass = requestingClass(callerClass); if (isTriviallyAllowed(requestingClass)) { return; } - ModuleEntitlements entitlements = getEntitlements(requestingClass, NetworkEntitlement.class); - if (entitlements.getEntitlements(NetworkEntitlement.class).anyMatch(n -> n.matchActions(actions))) { + var classEntitlements = getEntitlements(requestingClass); + if (classEntitlements.hasEntitlement(InboundNetworkEntitlement.class) == false) { + throw new NotEntitledException( + Strings.format( + "Missing entitlement: class [%s], module [%s], entitlement [inbound_network]", + requestingClass, + requestingClass.getModule().getName() + ) + ); + } + + if (classEntitlements.hasEntitlement(OutboundNetworkEntitlement.class) == false) { + throw new NotEntitledException( + Strings.format( + "Missing entitlement: class [%s], module [%s], 
entitlement [outbound_network]", + requestingClass, + requestingClass.getModule().getName() + ) + ); + } + logger.debug( + () -> Strings.format( + "Entitled: class [%s], module [%s], entitlements [inbound_network, outbound_network]", + requestingClass, + requestingClass.getModule().getName() + ) + ); + } + + public void checkWriteProperty(Class callerClass, String property) { + var requestingClass = requestingClass(callerClass); + if (isTriviallyAllowed(requestingClass)) { + return; + } + + ModuleEntitlements entitlements = getEntitlements(requestingClass); + if (entitlements.getEntitlements(WriteSystemPropertiesEntitlement.class).anyMatch(e -> e.properties().contains(property))) { logger.debug( () -> Strings.format( - "Entitled: class [%s], module [%s], entitlement [network], actions [%s]", + "Entitled: class [%s], module [%s], entitlement [write_system_properties], property [%s]", requestingClass, requestingClass.getModule().getName(), - NetworkEntitlement.printActions(actions) + property ) ); return; } throw new NotEntitledException( Strings.format( - "Missing entitlement: class [%s], module [%s], entitlement [network], actions [%s]", + "Missing entitlement: class [%s], module [%s], entitlement [write_system_properties], property [%s]", requestingClass, requestingClass.getModule().getName(), - NetworkEntitlement.printActions(actions) + property ) ); } @@ -228,7 +271,7 @@ private void checkEntitlementPresent(Class callerClass, Class Strings.format( @@ -250,17 +293,14 @@ private void checkEntitlementPresent(Class callerClass, Class requestingClass, Class entitlementClass) { - return moduleEntitlementsMap.computeIfAbsent( - requestingClass.getModule(), - m -> computeEntitlements(requestingClass, entitlementClass) - ); + ModuleEntitlements getEntitlements(Class requestingClass) { + return moduleEntitlementsMap.computeIfAbsent(requestingClass.getModule(), m -> computeEntitlements(requestingClass)); } - private ModuleEntitlements computeEntitlements(Class 
requestingClass, Class entitlementClass) { + private ModuleEntitlements computeEntitlements(Class requestingClass) { Module requestingModule = requestingClass.getModule(); if (isServerModule(requestingModule)) { - return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName(), "server", entitlementClass); + return getModuleScopeEntitlements(requestingClass, serverEntitlements, requestingModule.getName(), "server"); } // plugins @@ -274,7 +314,7 @@ private ModuleEntitlements computeEntitlements(Class requestingClass, Class callerClass, Map> scopeEntitlements, String moduleName, - String component, - Class entitlementClass + String component ) { var entitlements = scopeEntitlements.get(moduleName); if (entitlements == null) { - logger.warn( - "No applicable entitlement policy for entitlement [{}] in [{}], module [{}], class [{}]", - PolicyParser.getEntitlementTypeName(entitlementClass), - component, - moduleName, - callerClass - ); + logger.warn("No applicable entitlement policy for [{}], module [{}], class [{}]", component, moduleName, callerClass); return ModuleEntitlements.NONE; } return ModuleEntitlements.from(entitlements); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java index ac4d4afdd97f8..f98e0439633e3 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParser.java @@ -9,6 +9,7 @@ package org.elasticsearch.entitlement.runtime.policy; +import org.elasticsearch.xcontent.XContentLocation; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.yaml.YamlXContent; @@ -38,7 +39,9 @@ public class PolicyParser { FileEntitlement.class, 
CreateClassLoaderEntitlement.class, SetHttpsConnectionPropertiesEntitlement.class, - NetworkEntitlement.class + OutboundNetworkEntitlement.class, + InboundNetworkEntitlement.class, + WriteSystemPropertiesEntitlement.class ).collect(Collectors.toUnmodifiableMap(PolicyParser::getEntitlementTypeName, Function.identity())); protected final XContentParser policyParser; @@ -119,6 +122,7 @@ protected Scope parseScope(String scopeName) throws IOException { } protected Entitlement parseEntitlement(String scopeName, String entitlementType) throws IOException { + XContentLocation startLocation = policyParser.getTokenLocation(); Class entitlementClass = EXTERNAL_ENTITLEMENTS.get(entitlementType); if (entitlementClass == null) { @@ -170,7 +174,10 @@ protected Entitlement parseEntitlement(String scopeName, String entitlementType) try { return (Entitlement) entitlementConstructor.newInstance(parameterValues); } catch (InvocationTargetException | InstantiationException | IllegalAccessException e) { - throw new IllegalStateException("internal error"); + if (e.getCause() instanceof PolicyValidationException piae) { + throw newPolicyParserException(startLocation, scopeName, entitlementType, piae); + } + throw new IllegalStateException("internal error", e); } } @@ -191,4 +198,13 @@ protected PolicyParserException newPolicyParserException(String scopeName, Strin message ); } + + protected PolicyParserException newPolicyParserException( + XContentLocation location, + String scopeName, + String entitlementType, + PolicyValidationException cause + ) { + return PolicyParserException.newPolicyParserException(location, policyName, scopeName, entitlementType, cause); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java index 5dfa12f11d0be..e7cc8bed2ca2c 100644 --- 
a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserException.java @@ -86,7 +86,36 @@ public static PolicyParserException newPolicyParserException( } } + public static PolicyParserException newPolicyParserException( + XContentLocation location, + String policyName, + String scopeName, + String entitlementType, + PolicyValidationException cause + ) { + assert (scopeName != null); + return new PolicyParserException( + "[" + + location.lineNumber() + + ":" + + location.columnNumber() + + "] policy parsing error for [" + + policyName + + "] in scope [" + + scopeName + + "] for entitlement type [" + + entitlementType + + "]: " + + cause.getMessage(), + cause + ); + } + private PolicyParserException(String message) { super(message); } + + private PolicyParserException(String message, PolicyValidationException cause) { + super(message, cause); + } } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java new file mode 100644 index 0000000000000..a2bc49d99b44f --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyValidationException.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * This exception is used to track validation errors thrown during the construction + * of entitlements. By using this instead of other exception types the policy + * parser is able to wrap this exception with a line/character number for + * additional useful error information. + */ +class PolicyValidationException extends RuntimeException { + + PolicyValidationException(String message) { + super(message); + } + + PolicyValidationException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java new file mode 100644 index 0000000000000..f0d1d14177332 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteAllSystemPropertiesEntitlement.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +/** + * An Entitlement to allow writing all properties such as system properties. 
+ */ +public record WriteAllSystemPropertiesEntitlement() implements Entitlement { + @ExternalEntitlement + public WriteAllSystemPropertiesEntitlement() {} +} diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java new file mode 100644 index 0000000000000..654ebbda9dab3 --- /dev/null +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/WriteSystemPropertiesEntitlement.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.entitlement.runtime.policy; + +import java.util.List; +import java.util.Set; + +/** + * An Entitlement to allow writing properties such as system properties. 
+ */ +public record WriteSystemPropertiesEntitlement(Set properties) implements Entitlement { + + @ExternalEntitlement(parameterNames = { "properties" }, esModulesOnly = false) + public WriteSystemPropertiesEntitlement(List properties) { + this(Set.copyOf(properties)); + } +} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java deleted file mode 100644 index 91051d48c365f..0000000000000 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/NetworkEntitlementTests.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -package org.elasticsearch.entitlement.runtime.policy; - -import org.elasticsearch.test.ESTestCase; - -import java.util.List; - -import static org.hamcrest.Matchers.is; - -public class NetworkEntitlementTests extends ESTestCase { - - public void testMatchesActions() { - var listenEntitlement = new NetworkEntitlement(List.of(NetworkEntitlement.LISTEN)); - var emptyEntitlement = new NetworkEntitlement(List.of()); - var connectAcceptEntitlement = new NetworkEntitlement(List.of(NetworkEntitlement.CONNECT, NetworkEntitlement.ACCEPT)); - - assertThat(listenEntitlement.matchActions(0), is(true)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(true)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(false)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(false)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); - assertThat(listenEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); - - assertThat(connectAcceptEntitlement.matchActions(0), is(true)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(false)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(true)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(true)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); - assertThat(connectAcceptEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | 
NetworkEntitlement.ACCEPT_ACTION), is(true)); - - assertThat(emptyEntitlement.matchActions(0), is(true)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION), is(false)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.ACCEPT_ACTION), is(false)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION), is(false)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.LISTEN_ACTION | NetworkEntitlement.CONNECT_ACTION), is(false)); - assertThat(emptyEntitlement.matchActions(NetworkEntitlement.CONNECT_ACTION | NetworkEntitlement.ACCEPT_ACTION), is(false)); - } -} diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index 092813be75cc8..c3acefbbb323b 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -66,11 +66,7 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals( - "No policy for the unnamed module", - ModuleEntitlements.NONE, - policyManager.getEntitlements(callerClass, Entitlement.class) - ); + assertEquals("No policy for the unnamed module", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -82,7 +78,7 @@ public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals("No policy for this plugin", 
ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); + assertEquals("No policy for this plugin", ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -94,11 +90,11 @@ public void testGetEntitlementsFailureIsCached() { var callerClass = this.getClass(); var requestingModule = callerClass.getModule(); - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); + assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); // A second time - assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass, Entitlement.class)); + assertEquals(ModuleEntitlements.NONE, policyManager.getEntitlements(callerClass)); // Nothing new in the map assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); @@ -116,7 +112,7 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { // Any class from the current module (unnamed) will do var callerClass = this.getClass(); - var entitlements = policyManager.getEntitlements(callerClass, Entitlement.class); + var entitlements = policyManager.getEntitlements(callerClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); } @@ -130,11 +126,7 @@ public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotF var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); var requestingModule = mockServerClass.getModule(); - assertEquals( - "No policy for this module in server", - ModuleEntitlements.NONE, - policyManager.getEntitlements(mockServerClass, Entitlement.class) - ); + assertEquals("No policy for this module in server", 
ModuleEntitlements.NONE, policyManager.getEntitlements(mockServerClass)); assertEquals(Map.of(requestingModule, ModuleEntitlements.NONE), policyManager.moduleEntitlementsMap); } @@ -154,7 +146,7 @@ public void testGetEntitlementsReturnsEntitlementsForServerModule() throws Class // loaded too early) to mimic a class that would be in the server module. var mockServerClass = ModuleLayer.boot().findLoader("jdk.httpserver").loadClass("com.sun.net.httpserver.HttpServer"); - var entitlements = policyManager.getEntitlements(mockServerClass, Entitlement.class); + var entitlements = policyManager.getEntitlements(mockServerClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat(entitlements.hasEntitlement(ExitVMEntitlement.class), is(true)); } @@ -175,7 +167,7 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc var layer = createLayerForJar(jar, "org.example.plugin"); var mockPluginClass = layer.findLoader("org.example.plugin").loadClass("q.B"); - var entitlements = policyManager.getEntitlements(mockPluginClass, Entitlement.class); + var entitlements = policyManager.getEntitlements(mockPluginClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat( entitlements.getEntitlements(FileEntitlement.class).toList(), @@ -195,11 +187,11 @@ public void testGetEntitlementsResultIsCached() { // Any class from the current module (unnamed) will do var callerClass = this.getClass(); - var entitlements = policyManager.getEntitlements(callerClass, Entitlement.class); + var entitlements = policyManager.getEntitlements(callerClass); assertThat(entitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); var cachedResult = policyManager.moduleEntitlementsMap.values().stream().findFirst().orElseThrow(); - var entitlementsAgain = policyManager.getEntitlements(callerClass, 
Entitlement.class); + var entitlementsAgain = policyManager.getEntitlements(callerClass); // Nothing new in the map assertThat(policyManager.moduleEntitlementsMap, aMapWithSize(1)); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java index 1e0c31d2280b8..3be350f9361fd 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyParserTests.java @@ -15,6 +15,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.List; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; @@ -55,15 +56,46 @@ public void testPolicyBuilderOnExternalPlugin() throws IOException { public void testParseNetwork() throws IOException { Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" entitlement-module-name: - - network: - actions: - - listen - - accept - - connect + - inbound_network """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); Policy expected = new Policy( "test-policy.yaml", - List.of(new Scope("entitlement-module-name", List.of(new NetworkEntitlement(List.of("listen", "accept", "connect"))))) + List.of(new Scope("entitlement-module-name", List.of(new InboundNetworkEntitlement()))) + ); + assertEquals(expected, parsedPolicy); + + parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - outbound_network + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + expected = new Policy("test-policy.yaml", List.of(new Scope("entitlement-module-name", List.of(new OutboundNetworkEntitlement())))); + assertEquals(expected, parsedPolicy); + + parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - 
outbound_network + - inbound_network + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + expected = new Policy( + "test-policy.yaml", + List.of(new Scope("entitlement-module-name", List.of(new OutboundNetworkEntitlement(), new InboundNetworkEntitlement()))) + ); + assertEquals(expected, parsedPolicy); + } + + public void testParseWriteSystemProperties() throws IOException { + Policy parsedPolicy = new PolicyParser(new ByteArrayInputStream(""" + entitlement-module-name: + - write_system_properties: + properties: + - es.property1 + - es.property2 + """.getBytes(StandardCharsets.UTF_8)), "test-policy.yaml", false).parsePolicy(); + Policy expected = new Policy( + "test-policy.yaml", + List.of( + new Scope("entitlement-module-name", List.of(new WriteSystemPropertiesEntitlement(Set.of("es.property1", "es.property2")))) + ) ); assertEquals(expected, parsedPolicy); } diff --git a/modules/apm/build.gradle b/modules/apm/build.gradle index 07e6c7a042135..de50d88a46d73 100644 --- a/modules/apm/build.gradle +++ b/modules/apm/build.gradle @@ -5,6 +5,7 @@ * 2.0. 
*/ apply plugin: 'elasticsearch.internal-es-plugin' +apply plugin: 'elasticsearch.internal-java-rest-test' esplugin { name = 'apm' @@ -20,6 +21,9 @@ dependencies { implementation "io.opentelemetry:opentelemetry-context:${otelVersion}" implementation "io.opentelemetry:opentelemetry-semconv:${otelSemconvVersion}" runtimeOnly "co.elastic.apm:elastic-apm-agent:1.52.0" + + javaRestTestImplementation project(':modules:apm') + javaRestTestImplementation project(':test:framework') } tasks.named("dependencyLicenses").configure { diff --git a/modules/apm/src/javaRestTest/java/org/elasticsearch/telemetry/apm/ApmAgentSettingsIT.java b/modules/apm/src/javaRestTest/java/org/elasticsearch/telemetry/apm/ApmAgentSettingsIT.java new file mode 100644 index 0000000000000..ee26178723608 --- /dev/null +++ b/modules/apm/src/javaRestTest/java/org/elasticsearch/telemetry/apm/ApmAgentSettingsIT.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.telemetry.apm; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.junit.ClassRule; + +public class ApmAgentSettingsIT extends ESRestTestCase { + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .module("apm") + .systemProperty("es.entitlements.enabled", "true") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testChangingApmAgentSettingsIsAllowed() throws Exception { + var settings = Settings.builder().put("telemetry.metrics.enabled", true); + APMAgentSettings.PERMITTED_AGENT_KEYS.stream().forEach(key -> settings.put("telemetry.agent." + key, "value")); + updateClusterSettings(settings.build()); + + updateClusterSettings(Settings.builder().put("telemetry.metrics.enabled", false).build()); + } +} diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java index f66683a787bc0..9d4822aa9c4d6 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/APMAgentSettings.java @@ -81,6 +81,10 @@ public void initAgentSystemProperties(Settings settings) { /** * Copies a setting to the APM agent's system properties under elastic.apm, either * by setting the property if {@code value} has a value, or by deleting the property if it doesn't. + * + * All permitted agent properties must be covered by the write_system_properties entitlement, + * see the entitlement policy of this module! 
+ * * @param key the config key to set, without any prefix * @param value the value to set, or null */ @@ -106,9 +110,11 @@ public void setAgentSetting(String key, String value) { /** * Allow-list of APM agent config keys users are permitted to configure. + *

WARNING: Make sure to update the module entitlements if permitting additional agent keys + *

* @see APM Java Agent Configuration */ - private static final Set PERMITTED_AGENT_KEYS = Set.of( + public static final Set PERMITTED_AGENT_KEYS = Set.of( // Circuit-Breaker: "circuit_breaker_enabled", "stress_monitoring_interval", diff --git a/modules/apm/src/main/plugin-metadata/entitlement-policy.yaml b/modules/apm/src/main/plugin-metadata/entitlement-policy.yaml index 9c10bafca42f9..9ebe5f3b8baea 100644 --- a/modules/apm/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/apm/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,90 @@ org.elasticsearch.telemetry.apm: - create_class_loader + - write_system_properties: + properties: + - elastic.apm.recording + - elastic.apm.circuit_breaker_enabled + - elastic.apm.stress_monitoring_interval + - elastic.apm.stress_monitor_gc_stress_threshold + - elastic.apm.stress_monitor_gc_relief_threshold + - elastic.apm.stress_monitor_cpu_duration_threshold + - elastic.apm.stress_monitor_system_cpu_stress_threshold + - elastic.apm.stress_monitor_system_cpu_relief_threshold + - elastic.apm.service_name + - elastic.apm.service_node_name + - elastic.apm.hostname + - elastic.apm.environment + - elastic.apm.transaction_sample_rate + - elastic.apm.transaction_max_spans + - elastic.apm.long_field_max_length + - elastic.apm.sanitize_field_names + - elastic.apm.enable_instrumentations + - elastic.apm.disable_instrumentations + - elastic.apm.unnest_exceptions + - elastic.apm.ignore_exceptions + - elastic.apm.capture_body + - elastic.apm.capture_headers + - elastic.apm.global_labels + - elastic.apm.instrument_ancient_bytecode + - elastic.apm.context_propagation_only + - elastic.apm.classes_excluded_from_instrumentation + - elastic.apm.trace_methods + - elastic.apm.trace_methods_duration_threshold + - elastic.apm.breakdown_metrics + - elastic.apm.plugins_dir + - elastic.apm.use_elastic_traceparent_header + - elastic.apm.disable_outgoing_tracecontext_headers + - elastic.apm.span_min_duration + - elastic.apm.cloud_provider + 
- elastic.apm.enable_public_api_annotation_inheritance + - elastic.apm.transaction_name_groups + - elastic.apm.trace_continuation_strategy + - elastic.apm.baggage_to_attach + - elastic.apm.capture_body_content_types + - elastic.apm.transaction_ignore_urls + - elastic.apm.transaction_ignore_user_agents + - elastic.apm.use_path_as_transaction_name + - elastic.apm.span_compression_enabled + - elastic.apm.span_compression_exact_match_max_duration + - elastic.apm.span_compression_same_kind_max_duration + - elastic.apm.exit_span_min_duration + - elastic.apm.capture_jmx_metrics + - elastic.apm.log_level + - elastic.apm.log_ecs_reformatting + - elastic.apm.log_ecs_reformatting_additional_fields + - elastic.apm.log_ecs_formatter_allow_list + - elastic.apm.log_file_size + - elastic.apm.log_sending + - elastic.apm.dedot_custom_metrics + - elastic.apm.custom_metrics_histogram_boundaries + - elastic.apm.metric_set_limit + - elastic.apm.agent_reporter_health_metrics + - elastic.apm.agent_background_overhead_metrics + - elastic.apm.profiling_inferred_spans_enabled + - elastic.apm.profiling_inferred_spans_logging_enabled + - elastic.apm.profiling_inferred_spans_sampling_interval + - elastic.apm.profiling_inferred_spans_min_duration + - elastic.apm.profiling_inferred_spans_included_classes + - elastic.apm.profiling_inferred_spans_excluded_classes + - elastic.apm.profiling_inferred_spans_lib_directory + - elastic.apm.server_url + - elastic.apm.server_urls + - elastic.apm.disable_send + - elastic.apm.server_timeout + - elastic.apm.verify_server_cert + - elastic.apm.max_queue_size + - elastic.apm.include_process_args + - elastic.apm.api_request_time + - elastic.apm.api_request_size + - elastic.apm.metrics_interval + - elastic.apm.disable_metrics + - elastic.apm.aws_lambda_handler + - elastic.apm.data_flush_timeout + - elastic.apm.application_packages + - elastic.apm.stack_trace_limit + - elastic.apm.span_stack_trace_min_duration elastic.apm.agent: - set_https_connection_properties + - 
write_system_properties: + properties: + - AsyncProfiler.safemode diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java index 839ac9c7653e4..7c04c38eff63c 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamIT.java @@ -1925,7 +1925,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { DataStream original = currentState.getMetadata().dataStreams().get(dataStreamName); DataStream broken = original.copy() .setBackingIndices( - original.getBackingIndices() + original.getDataComponent() .copy() .setIndices( List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1)) diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index 40bde501f0bfd..0ae7504bb9d7f 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -148,7 +148,7 @@ public void setup() throws Exception { dsBackingIndexName = dataStreamInfos.get(0).getDataStream().getIndices().get(0).getName(); otherDsBackingIndexName = dataStreamInfos.get(1).getDataStream().getIndices().get(0).getName(); fsBackingIndexName = dataStreamInfos.get(2).getDataStream().getIndices().get(0).getName(); - fsFailureIndexName = dataStreamInfos.get(2).getDataStream().getFailureIndices().getIndices().get(0).getName(); + fsFailureIndexName = dataStreamInfos.get(2).getDataStream().getFailureIndices().get(0).getName(); // 
Will be used in some tests, to test renaming while restoring a snapshot: ds2BackingIndexName = dsBackingIndexName.replace("-ds-", "-ds2-"); @@ -279,7 +279,7 @@ public void testSnapshotAndRestoreAllDataStreamsInPlace() throws Exception { assertThat(backingIndices.stream().map(Index::getName).collect(Collectors.toList()), contains(otherDsBackingIndexName)); backingIndices = dataStreamInfos.get(2).getDataStream().getIndices(); assertThat(backingIndices.stream().map(Index::getName).collect(Collectors.toList()), contains(fsBackingIndexName)); - List failureIndices = dataStreamInfos.get(2).getDataStream().getFailureIndices().getIndices(); + List failureIndices = dataStreamInfos.get(2).getDataStream().getFailureIndices(); assertThat(failureIndices.stream().map(Index::getName).collect(Collectors.toList()), contains(fsFailureIndexName)); } @@ -375,7 +375,7 @@ public void testFailureStoreSnapshotAndRestore() throws Exception { assertEquals(1, dataStreamInfos.size()); assertEquals(1, dataStreamInfos.get(0).getDataStream().getIndices().size()); assertEquals(fsBackingIndexName, dataStreamInfos.get(0).getDataStream().getIndices().get(0).getName()); - assertEquals(fsFailureIndexName, dataStreamInfos.get(0).getDataStream().getFailureIndices().getIndices().get(0).getName()); + assertEquals(fsFailureIndexName, dataStreamInfos.get(0).getDataStream().getFailureIndices().get(0).getName()); } { // With rename pattern @@ -394,7 +394,7 @@ public void testFailureStoreSnapshotAndRestore() throws Exception { assertEquals(1, dataStreamInfos.size()); assertEquals(1, dataStreamInfos.get(0).getDataStream().getIndices().size()); assertEquals(fs2BackingIndexName, dataStreamInfos.get(0).getDataStream().getIndices().get(0).getName()); - assertEquals(fs2FailureIndexName, dataStreamInfos.get(0).getDataStream().getFailureIndices().getIndices().get(0).getName()); + assertEquals(fs2FailureIndexName, dataStreamInfos.get(0).getDataStream().getFailureIndices().get(0).getName()); } } @@ -587,8 +587,8 @@ 
public void testSnapshotAndRestoreAll() throws Exception { assertEquals(otherDsBackingIndexName, dataStreamInfos.get(1).getDataStream().getIndices().get(0).getName()); assertEquals(1, dataStreamInfos.get(2).getDataStream().getIndices().size()); assertEquals(fsBackingIndexName, dataStreamInfos.get(2).getDataStream().getIndices().get(0).getName()); - assertEquals(1, dataStreamInfos.get(2).getDataStream().getFailureIndices().getIndices().size()); - assertEquals(fsFailureIndexName, dataStreamInfos.get(2).getDataStream().getFailureIndices().getIndices().get(0).getName()); + assertEquals(1, dataStreamInfos.get(2).getDataStream().getFailureIndices().size()); + assertEquals(fsFailureIndexName, dataStreamInfos.get(2).getDataStream().getFailureIndices().get(0).getName()); GetAliasesResponse getAliasesResponse = client.admin() .indices() @@ -659,7 +659,7 @@ public void testSnapshotAndRestoreIncludeAliasesFalse() throws Exception { assertEquals(1, dataStreamInfos.get(2).getDataStream().getIndices().size()); assertEquals(fsBackingIndexName, dataStreamInfos.get(2).getDataStream().getIndices().get(0).getName()); assertEquals(1, dataStreamInfos.get(2).getDataStream().getIndices().size()); - assertEquals(fsFailureIndexName, dataStreamInfos.get(2).getDataStream().getFailureIndices().getIndices().get(0).getName()); + assertEquals(fsFailureIndexName, dataStreamInfos.get(2).getDataStream().getFailureIndices().get(0).getName()); GetAliasesResponse getAliasesResponse = client.admin() .indices() @@ -1257,8 +1257,8 @@ public void testExcludeDSFromSnapshotWhenExcludingAnyOfItsIndices() { assertThat(restoreSnapshotResponse.failedShards(), is(0)); GetDataStreamAction.Response.DataStreamInfo dataStream = getDataStreamInfo(dataStreamName).getFirst(); - assertThat(dataStream.getDataStream().getBackingIndices().getIndices(), not(empty())); - assertThat(dataStream.getDataStream().getFailureIndices().getIndices(), empty()); + assertThat(dataStream.getDataStream().getDataComponent().getIndices(), 
not(empty())); + assertThat(dataStream.getDataStream().getFailureIndices(), empty()); } } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java index 00dfd5c65b126..448f5356ff073 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LazyRolloverDuringDisruptionIT.java @@ -58,7 +58,7 @@ public void testRolloverIsExecutedOnce() throws ExecutionException, InterruptedE // Verify that the data stream is marked for rollover and that it has currently one index DataStream dataStream = getDataStream(dataStreamName); assertThat(dataStream.rolloverOnWrite(), equalTo(true)); - assertThat(dataStream.getBackingIndices().getIndices().size(), equalTo(1)); + assertThat(dataStream.getDataComponent().getIndices().size(), equalTo(1)); // Introduce a disruption to the master node that should delay the rollover execution final var barrier = new CyclicBarrier(2); @@ -107,7 +107,7 @@ public void onFailure(Exception e) { // Verify that the rollover has happened once dataStream = getDataStream(dataStreamName); assertThat(dataStream.rolloverOnWrite(), equalTo(false)); - assertThat(dataStream.getBackingIndices().getIndices().size(), equalTo(2)); + assertThat(dataStream.getDataComponent().getIndices().size(), equalTo(2)); } private DataStream getDataStream(String dataStreamName) { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java index cf8380a5c859c..0bc4157eb2e48 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java +++ 
b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/LogsDataStreamIT.java @@ -285,7 +285,7 @@ private void assertDataStreamBackingIndicesModes(final String dataStreamName, fi final GetDataStreamAction.Response getDataStreamResponse = client().execute(GetDataStreamAction.INSTANCE, getDataStreamRequest) .actionGet(); final DataStream dataStream = getDataStreamResponse.getDataStreams().get(0).getDataStream(); - final DataStream.DataStreamIndices backingIndices = dataStream.getBackingIndices(); + final DataStream.DataStreamIndices backingIndices = dataStream.getDataComponent(); final Iterator indexModesIterator = modes.iterator(); assertThat(backingIndices.getIndices().size(), Matchers.equalTo(modes.size())); for (final Index index : backingIndices.getIndices()) { diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java index 19067d85a6805..1c00e3ad380dc 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceIT.java @@ -1084,7 +1084,7 @@ public void testLifecycleAppliedToFailureStore() throws Exception { assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo(dataStreamName)); List backingIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices(); assertThat(backingIndices.size(), equalTo(1)); - List failureIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getFailureIndices().getIndices(); + List failureIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getFailureIndices(); assertThat(failureIndices.size(), equalTo(2)); }); @@ -1129,7 
+1129,7 @@ public void testLifecycleAppliedToFailureStore() throws Exception { assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo(dataStreamName)); List backingIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getIndices(); assertThat(backingIndices.size(), equalTo(1)); - List failureIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getFailureIndices().getIndices(); + List failureIndices = getDataStreamResponse.getDataStreams().get(0).getDataStream().getFailureIndices(); assertThat(failureIndices.size(), equalTo(1)); assertThat(failureIndices.get(0).getName(), equalTo(secondGenerationIndex)); }); @@ -1156,14 +1156,7 @@ private static List getFailureIndices(String dataStreamName) { .actionGet(); assertThat(getDataStreamResponse.getDataStreams().size(), equalTo(1)); assertThat(getDataStreamResponse.getDataStreams().get(0).getDataStream().getName(), equalTo(dataStreamName)); - return getDataStreamResponse.getDataStreams() - .get(0) - .getDataStream() - .getFailureIndices() - .getIndices() - .stream() - .map(Index::getName) - .toList(); + return getDataStreamResponse.getDataStreams().get(0).getDataStream().getFailureIndices().stream().map(Index::getName).toList(); } static void indexDocs(String dataStream, int numDocs) { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java index cc5e00d8283ad..c6bee86e20fcd 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java @@ -171,11 +171,11 @@ protected DataStreamsStatsAction.DataStreamShardStats readShardResult(StreamInpu if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM) 
{ DataStream dataStream = (DataStream) indexAbstraction; AggregatedStats stats = aggregatedDataStreamsStats.computeIfAbsent(dataStream.getName(), s -> new AggregatedStats()); - dataStream.getBackingIndices().getIndices().stream().map(Index::getName).forEach(index -> { + dataStream.getIndices().stream().map(Index::getName).forEach(index -> { stats.backingIndices.add(index); allBackingIndices.add(index); }); - dataStream.getFailureIndices().getIndices().stream().map(Index::getName).forEach(index -> { + dataStream.getFailureIndices().stream().map(Index::getName).forEach(index -> { stats.backingIndices.add(index); allBackingIndices.add(index); }); diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java index c770b84ee98a3..769e4db419606 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DeleteDataStreamTransportAction.java @@ -156,7 +156,7 @@ static ClusterState removeDataStream( DataStream dataStream = currentState.metadata().dataStreams().get(dataStreamName); assert dataStream != null; backingIndicesToRemove.addAll(dataStream.getIndices()); - backingIndicesToRemove.addAll(dataStream.getFailureIndices().getIndices()); + backingIndicesToRemove.addAll(dataStream.getFailureIndices()); } // first delete the data streams and then the indices: diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java index 2d310fef0be7e..25d679f91b83e 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java +++ 
b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/TransportGetDataStreamsAction.java @@ -207,8 +207,8 @@ static GetDataStreamAction.Response innerOperation( Map backingIndicesSettingsValues = new HashMap<>(); Metadata metadata = state.getMetadata(); collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getIndices()); - if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().getIndices().isEmpty() == false) { - collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices().getIndices()); + if (DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().isEmpty() == false) { + collectIndexSettingsValues(dataStream, backingIndicesSettingsValues, metadata, dataStream.getFailureIndices()); } GetDataStreamAction.Response.TimeSeries timeSeries = null; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java index 7de3f180753f8..e993460795e16 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleService.java @@ -762,10 +762,8 @@ static List getTargetIndices( targetIndices.add(index); } } - if (withFailureStore - && DataStream.isFailureStoreFeatureFlagEnabled() - && dataStream.getFailureIndices().getIndices().isEmpty() == false) { - for (Index index : dataStream.getFailureIndices().getIndices()) { + if (withFailureStore && DataStream.isFailureStoreFeatureFlagEnabled() && dataStream.getFailureIndices().isEmpty() == false) { + for (Index index : dataStream.getFailureIndices()) { if (dataStream.isIndexManagedByDataStreamLifecycle(index, indexMetadataSupplier) && 
indicesToExcludeForRemainingRun.contains(index) == false) { targetIndices.add(index); @@ -820,7 +818,7 @@ private Set maybeExecuteRollover(ClusterState state, DataStream dataStrea @Nullable private Index maybeExecuteRollover(ClusterState state, DataStream dataStream, boolean rolloverFailureStore) { - Index currentRunWriteIndex = rolloverFailureStore ? dataStream.getFailureStoreWriteIndex() : dataStream.getWriteIndex(); + Index currentRunWriteIndex = rolloverFailureStore ? dataStream.getWriteFailureIndex() : dataStream.getWriteIndex(); if (currentRunWriteIndex == null) { return null; } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index f05fe518fa56d..8378526e6bdae 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -141,7 +141,7 @@ public void testUpdateTimeSeriesTemporalRange_NoUpdateBecauseReplicated() { ).getMetadata(); DataStream d = metadata.dataStreams().get(dataStreamName); metadata = Metadata.builder(metadata) - .put(d.copy().setReplicated(true).setBackingIndices(d.getBackingIndices().copy().setRolloverOnWrite(false).build()).build()) + .put(d.copy().setReplicated(true).setBackingIndices(d.getDataComponent().copy().setRolloverOnWrite(false).build()).build()) .build(); now = now.plus(1, ChronoUnit.HOURS); diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java index 0bb990e544892..55afb228da2c0 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java +++ 
b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/lifecycle/DataStreamLifecycleServiceTests.java @@ -230,7 +230,7 @@ public void testOperationsExecutedOnce() { .toList(); assertThat(deleteRequests.get(0).indices()[0], is(dataStream.getIndices().get(0).getName())); assertThat(deleteRequests.get(1).indices()[0], is(dataStream.getIndices().get(1).getName())); - assertThat(deleteRequests.get(2).indices()[0], is(dataStream.getFailureIndices().getIndices().get(0).getName())); + assertThat(deleteRequests.get(2).indices()[0], is(dataStream.getFailureIndices().get(0).getName())); // on the second run the rollover and delete requests should not execute anymore // i.e. the count should *remain* 1 for rollover and 2 for deletes @@ -1495,7 +1495,7 @@ public void testTargetIndices() { ).copy().setDataStreamOptions(dataStreamOptions).build(); // failure store is managed even when disabled builder.put(dataStream); Metadata metadata = builder.build(); - Set indicesToExclude = Set.of(dataStream.getIndices().get(0), dataStream.getFailureIndices().getIndices().get(0)); + Set indicesToExclude = Set.of(dataStream.getIndices().get(0), dataStream.getFailureIndices().get(0)); List targetBackingIndicesOnly = DataStreamLifecycleService.getTargetIndices( dataStream, indicesToExclude, @@ -1506,9 +1506,7 @@ public void testTargetIndices() { List targetIndices = DataStreamLifecycleService.getTargetIndices(dataStream, indicesToExclude, metadata::index, true); assertThat( targetIndices, - equalTo( - List.of(dataStream.getIndices().get(1), dataStream.getIndices().get(2), dataStream.getFailureIndices().getIndices().get(1)) - ) + equalTo(List.of(dataStream.getIndices().get(1), dataStream.getIndices().get(2), dataStream.getFailureIndices().get(1))) ); } @@ -1540,10 +1538,7 @@ public void testFailureStoreIsManagedEvenWhenDisabled() { rolloverFailureIndexRequest.getRolloverTarget(), is(IndexNameExpressionResolver.combineSelector(dataStreamName, IndexComponentSelector.FAILURES)) ); - 
assertThat( - ((DeleteIndexRequest) clientSeenRequests.get(2)).indices()[0], - is(dataStream.getFailureIndices().getIndices().get(0).getName()) - ); + assertThat(((DeleteIndexRequest) clientSeenRequests.get(2)).indices()[0], is(dataStream.getFailureIndices().get(0).getName())); } public void testMaybeExecuteRetentionSuccessfulDownsampledIndex() { diff --git a/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..df557f9944253 --- /dev/null +++ b/modules/reindex/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +ALL-UNNAMED: + - outbound_network diff --git a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java index 7848422b869df..a9082e5373e90 100644 --- a/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java +++ b/modules/repository-azure/src/internalClusterTest/java/org/elasticsearch/repositories/azure/AzureBlobStoreRepositoryMetricsTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.azure; +import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; @@ -21,6 +22,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.http.ResponseInjectingHttpHandler; import org.elasticsearch.plugins.PluginsService; import org.elasticsearch.repositories.RepositoriesMetrics; import org.elasticsearch.repositories.RepositoriesService; @@ -46,7 +48,6 @@ import java.util.stream.IntStream; import static 
org.elasticsearch.repositories.azure.AbstractAzureServerTestCase.randomBlobContent; -import static org.elasticsearch.repositories.azure.ResponseInjectingAzureHttpHandler.createFailNRequestsHandler; import static org.elasticsearch.repositories.blobstore.BlobStoreTestUtil.randomPurpose; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.hasSize; @@ -60,7 +61,7 @@ public class AzureBlobStoreRepositoryMetricsTests extends AzureBlobStoreReposito ); private static final int MAX_RETRIES = 3; - private final Queue requestHandlers = new ConcurrentLinkedQueue<>(); + private final Queue requestHandlers = new ConcurrentLinkedQueue<>(); @Override protected Map createHttpHandlers() { @@ -68,7 +69,7 @@ protected Map createHttpHandlers() { assert httpHandlers.size() == 1 : "This assumes there's a single handler"; return httpHandlers.entrySet() .stream() - .collect(Collectors.toMap(Map.Entry::getKey, e -> new ResponseInjectingAzureHttpHandler(requestHandlers, e.getValue()))); + .collect(Collectors.toMap(Map.Entry::getKey, e -> new ResponseInjectingHttpHandler(requestHandlers, e.getValue()))); } /** @@ -106,7 +107,7 @@ public void testThrottleResponsesAreCountedInMetrics() throws IOException { // Queue up some throttle responses final int numThrottles = randomIntBetween(1, MAX_RETRIES); IntStream.range(0, numThrottles) - .forEach(i -> requestHandlers.offer(new ResponseInjectingAzureHttpHandler.FixedRequestHandler(RestStatus.TOO_MANY_REQUESTS))); + .forEach(i -> requestHandlers.offer(new ResponseInjectingHttpHandler.FixedRequestHandler(RestStatus.TOO_MANY_REQUESTS))); // Check that the blob exists blobContainer.blobExists(purpose, blobName); @@ -132,11 +133,7 @@ public void testRangeNotSatisfiedAreCountedInMetrics() throws IOException { // Queue up a range-not-satisfied error requestHandlers.offer( - new ResponseInjectingAzureHttpHandler.FixedRequestHandler( - RestStatus.REQUESTED_RANGE_NOT_SATISFIED, - null, - GET_BLOB_REQUEST_PREDICATE - ) 
+ new ResponseInjectingHttpHandler.FixedRequestHandler(RestStatus.REQUESTED_RANGE_NOT_SATISFIED, null, GET_BLOB_REQUEST_PREDICATE) ); // Attempt to read the blob @@ -169,7 +166,7 @@ public void testErrorResponsesAreCountedInMetrics() throws IOException { if (status == RestStatus.TOO_MANY_REQUESTS) { throttles.incrementAndGet(); } - requestHandlers.offer(new ResponseInjectingAzureHttpHandler.FixedRequestHandler(status)); + requestHandlers.offer(new ResponseInjectingHttpHandler.FixedRequestHandler(status)); }); // Check that the blob exists @@ -265,7 +262,7 @@ public void testBatchDeleteFailure() throws IOException { clearMetrics(dataNodeName); // Handler will fail one or more of the batch requests - final ResponseInjectingAzureHttpHandler.RequestHandler failNRequestRequestHandler = createFailNRequestsHandler(failedBatches); + final ResponseInjectingHttpHandler.RequestHandler failNRequestRequestHandler = createFailNRequestsHandler(failedBatches); // Exhaust the retries IntStream.range(0, (numberOfBatches - failedBatches) + (failedBatches * (MAX_RETRIES + 1))) @@ -308,6 +305,35 @@ private MetricsAsserter metricsAsserter( return new MetricsAsserter(dataNodeName, operationPurpose, operation, repository); } + /** + * Creates a {@link ResponseInjectingHttpHandler.RequestHandler} that will persistently fail the first numberToFail + * distinct requests it sees. Any other requests are passed through to the delegate. 
+ * + * @param numberToFail The number of requests to fail + * @return the handler + */ + private static ResponseInjectingHttpHandler.RequestHandler createFailNRequestsHandler(int numberToFail) { + final List requestsToFail = new ArrayList<>(numberToFail); + return (exchange, delegate) -> { + final Headers requestHeaders = exchange.getRequestHeaders(); + final String requestId = requestHeaders.get("X-ms-client-request-id").get(0); + boolean failRequest = false; + synchronized (requestsToFail) { + if (requestsToFail.contains(requestId)) { + failRequest = true; + } else if (requestsToFail.size() < numberToFail) { + requestsToFail.add(requestId); + failRequest = true; + } + } + if (failRequest) { + exchange.sendResponseHeaders(500, -1); + } else { + delegate.handle(exchange); + } + }; + } + private class MetricsAsserter { private final String dataNodeName; private final OperationPurpose purpose; diff --git a/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..74197fb3ed9ae --- /dev/null +++ b/modules/repository-azure/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,2 @@ +io.netty.common: + - outbound_network diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java index 8979507230bdd..56f7ee123a10f 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java +++ b/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureBlobContainerStatsTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.blobstore.OperationPurpose; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.SuppressForbidden; +import 
org.elasticsearch.http.ResponseInjectingHttpHandler; import org.elasticsearch.rest.RestStatus; import org.junit.Before; @@ -34,14 +35,14 @@ public class AzureBlobContainerStatsTests extends AbstractAzureServerTestCase { - private final Queue requestHandlers = new ConcurrentLinkedQueue<>(); + private final Queue requestHandlers = new ConcurrentLinkedQueue<>(); @SuppressForbidden(reason = "use a http server") @Before public void configureAzureHandler() { httpServer.createContext( "/", - new ResponseInjectingAzureHttpHandler( + new ResponseInjectingHttpHandler( requestHandlers, new AzureHttpHandler(ACCOUNT, CONTAINER, null, MockAzureBlobStore.LeaseExpiryPredicate.NEVER_EXPIRE) ) @@ -61,7 +62,7 @@ public void testRetriesAndOperationsAreTrackedSeparately() throws IOException { for (int i = 0; i < randomIntBetween(10, 50); i++) { final boolean triggerRetry = randomBoolean(); if (triggerRetry) { - requestHandlers.offer(new ResponseInjectingAzureHttpHandler.FixedRequestHandler(RestStatus.TOO_MANY_REQUESTS)); + requestHandlers.offer(new ResponseInjectingHttpHandler.FixedRequestHandler(RestStatus.TOO_MANY_REQUESTS)); } final AzureBlobStore.Operation operation = randomFrom(supportedOperations); switch (operation) { diff --git a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 6505b7234966b..1adc380216529 100644 --- a/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/modules/repository-gcs/src/internalClusterTest/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -25,6 +25,7 @@ import org.apache.lucene.util.BytesRefBuilder; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import 
org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -268,7 +269,8 @@ protected GoogleCloudStorageBlobStore createBlobStore() { metadata.name(), storageService, bigArrays, - randomIntBetween(1, 8) * 1024 + randomIntBetween(1, 8) * 1024, + BackoffPolicy.noBackoff() ) { @Override long getLargeBlobThresholdInBytes() { diff --git a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java index 6284129c0825c..48192e9173ffa 100644 --- a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java +++ b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStore.java @@ -24,6 +24,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -41,6 +42,7 @@ import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Streams; import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import java.io.ByteArrayInputStream; @@ -105,6 +107,7 @@ class GoogleCloudStorageBlobStore implements BlobStore { private final GoogleCloudStorageOperationsStats stats; private final int bufferSize; private final BigArrays bigArrays; + private final BackoffPolicy casBackoffPolicy; GoogleCloudStorageBlobStore( String bucketName, @@ -112,7 +115,8 @@ class GoogleCloudStorageBlobStore 
implements BlobStore { String repositoryName, GoogleCloudStorageService storageService, BigArrays bigArrays, - int bufferSize + int bufferSize, + BackoffPolicy casBackoffPolicy ) { this.bucketName = bucketName; this.clientName = clientName; @@ -121,6 +125,7 @@ class GoogleCloudStorageBlobStore implements BlobStore { this.bigArrays = bigArrays; this.stats = new GoogleCloudStorageOperationsStats(bucketName); this.bufferSize = bufferSize; + this.casBackoffPolicy = casBackoffPolicy; } private Storage client() throws IOException { @@ -691,28 +696,46 @@ OptionalBytesReference compareAndExchangeRegister( .setMd5(Base64.getEncoder().encodeToString(MessageDigests.digest(updated, MessageDigests.md5()))) .build(); final var bytesRef = updated.toBytesRef(); - try { - SocketAccess.doPrivilegedVoidIOException( - () -> client().create( - blobInfo, - bytesRef.bytes, - bytesRef.offset, - bytesRef.length, - Storage.BlobTargetOption.generationMatch() - ) - ); - } catch (Exception e) { - final var serviceException = unwrapServiceException(e); - if (serviceException != null) { + + final Iterator retries = casBackoffPolicy.iterator(); + BaseServiceException finalException = null; + while (true) { + try { + SocketAccess.doPrivilegedVoidIOException( + () -> client().create( + blobInfo, + bytesRef.bytes, + bytesRef.offset, + bytesRef.length, + Storage.BlobTargetOption.generationMatch() + ) + ); + return OptionalBytesReference.of(expected); + } catch (Exception e) { + final var serviceException = unwrapServiceException(e); + if (serviceException == null) { + throw e; + } final var statusCode = serviceException.getCode(); - if (statusCode == RestStatus.PRECONDITION_FAILED.getStatus() || statusCode == RestStatus.TOO_MANY_REQUESTS.getStatus()) { + if (statusCode == RestStatus.PRECONDITION_FAILED.getStatus()) { return OptionalBytesReference.MISSING; } + if (statusCode == RestStatus.TOO_MANY_REQUESTS.getStatus()) { + finalException = ExceptionsHelper.useOrSuppress(finalException, 
serviceException); + if (retries.hasNext()) { + try { + // noinspection BusyWait + Thread.sleep(retries.next().millis()); + } catch (InterruptedException iex) { + Thread.currentThread().interrupt(); + finalException.addSuppressed(iex); + } + } else { + throw finalException; + } + } } - throw e; } - - return OptionalBytesReference.of(expected); } private static BaseServiceException unwrapServiceException(Throwable t) { diff --git a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 36944e61d9c18..16233d3b391d7 100644 --- a/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/modules/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -13,12 +13,14 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.cluster.metadata.RepositoryMetadata; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.recovery.RecoverySettings; import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.MeteredBlobStoreRepository; @@ -56,10 +58,33 @@ class GoogleCloudStorageRepository extends MeteredBlobStoreRepository { ); static final Setting CLIENT_NAME = Setting.simpleString("client", "default"); + /** + * We will retry CASes that fail due to throttling. 
We use an {@link BackoffPolicy#linearBackoff(TimeValue, int, TimeValue)} + * with the following parameters + */ + static final Setting RETRY_THROTTLED_CAS_DELAY_INCREMENT = Setting.timeSetting( + "throttled_cas_retry.delay_increment", + TimeValue.timeValueMillis(100), + TimeValue.ZERO + ); + static final Setting RETRY_THROTTLED_CAS_MAX_NUMBER_OF_RETRIES = Setting.intSetting( + "throttled_cas_retry.maximum_number_of_retries", + 2, + 0 + ); + static final Setting RETRY_THROTTLED_CAS_MAXIMUM_DELAY = Setting.timeSetting( + "throttled_cas_retry.maximum_delay", + TimeValue.timeValueSeconds(5), + TimeValue.ZERO + ); + private final GoogleCloudStorageService storageService; private final ByteSizeValue chunkSize; private final String bucket; private final String clientName; + private final TimeValue retryThrottledCasDelayIncrement; + private final int retryThrottledCasMaxNumberOfRetries; + private final TimeValue retryThrottledCasMaxDelay; GoogleCloudStorageRepository( final RepositoryMetadata metadata, @@ -83,6 +108,9 @@ class GoogleCloudStorageRepository extends MeteredBlobStoreRepository { this.chunkSize = getSetting(CHUNK_SIZE, metadata); this.bucket = getSetting(BUCKET, metadata); this.clientName = CLIENT_NAME.get(metadata.settings()); + this.retryThrottledCasDelayIncrement = RETRY_THROTTLED_CAS_DELAY_INCREMENT.get(metadata.settings()); + this.retryThrottledCasMaxNumberOfRetries = RETRY_THROTTLED_CAS_MAX_NUMBER_OF_RETRIES.get(metadata.settings()); + this.retryThrottledCasMaxDelay = RETRY_THROTTLED_CAS_MAXIMUM_DELAY.get(metadata.settings()); logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath(), chunkSize, isCompress()); } @@ -105,7 +133,15 @@ private static Map buildLocation(RepositoryMetadata metadata) { @Override protected GoogleCloudStorageBlobStore createBlobStore() { - return new GoogleCloudStorageBlobStore(bucket, clientName, metadata.name(), storageService, bigArrays, bufferSize); + return new 
GoogleCloudStorageBlobStore( + bucket, + clientName, + metadata.name(), + storageService, + bigArrays, + bufferSize, + BackoffPolicy.linearBackoff(retryThrottledCasDelayIncrement, retryThrottledCasMaxNumberOfRetries, retryThrottledCasMaxDelay) + ); } @Override diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java index 5700fa6de63fa..96db51a060f4c 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobContainerRetriesTests.java @@ -9,6 +9,7 @@ package org.elasticsearch.repositories.gcs; import fixture.gcs.FakeOAuth2HttpHandler; +import fixture.gcs.GoogleCloudStorageHttpHandler; import com.google.api.gax.retrying.RetrySettings; import com.google.cloud.http.HttpTransportOptions; @@ -18,10 +19,13 @@ import com.sun.net.httpserver.HttpHandler; import org.apache.http.HttpStatus; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; +import org.elasticsearch.common.blobstore.OptionalBytesReference; +import org.elasticsearch.common.blobstore.support.BlobContainerUtils; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.Streams; @@ -37,6 +41,7 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.http.ResponseInjectingHttpHandler; import org.elasticsearch.repositories.blobstore.AbstractBlobContainerRetriesTestCase; import 
org.elasticsearch.repositories.blobstore.ESMockAPIBasedRepositoryIntegTestCase; import org.elasticsearch.rest.RestStatus; @@ -55,6 +60,8 @@ import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Queue; +import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; @@ -158,7 +165,8 @@ StorageOptions createStorageOptions( "repo", service, BigArrays.NON_RECYCLING_INSTANCE, - randomIntBetween(1, 8) * 1024 + randomIntBetween(1, 8) * 1024, + BackoffPolicy.linearBackoff(TimeValue.timeValueMillis(1), 3, TimeValue.timeValueSeconds(1)) ); return new GoogleCloudStorageBlobContainer(randomBoolean() ? BlobPath.EMPTY : BlobPath.EMPTY.add("foo"), blobStore); @@ -463,6 +471,41 @@ public String next() { } } + public void testCompareAndExchangeWhenThrottled() throws IOException { + final Queue requestHandlers = new ConcurrentLinkedQueue<>(); + httpServer.createContext("/", new ResponseInjectingHttpHandler(requestHandlers, new GoogleCloudStorageHttpHandler("bucket"))); + + final int maxRetries = randomIntBetween(1, 3); + final BlobContainer container = createBlobContainer(maxRetries, null, null, null); + final byte[] data = randomBytes(randomIntBetween(1, BlobContainerUtils.MAX_REGISTER_CONTENT_LENGTH)); + final String key = randomIdentifier(); + + final OptionalBytesReference createResult = safeAwait( + l -> container.compareAndExchangeRegister(randomPurpose(), key, BytesArray.EMPTY, new BytesArray(data), l) + ); + assertEquals(createResult, OptionalBytesReference.EMPTY); + + final byte[] updatedData = randomBytes(randomIntBetween(1, BlobContainerUtils.MAX_REGISTER_CONTENT_LENGTH)); + final int failuresToExhaustAttempts = maxRetries + 1; + final int numberOfThrottles = randomIntBetween(failuresToExhaustAttempts, (4 * failuresToExhaustAttempts) - 1); + for (int i = 0; i < numberOfThrottles; i++) { + 
requestHandlers.offer( + new ResponseInjectingHttpHandler.FixedRequestHandler( + RestStatus.TOO_MANY_REQUESTS, + null, + ex -> ex.getRequestURI().getPath().equals("/upload/storage/v1/b/bucket/o") && ex.getRequestMethod().equals("POST") + ) + ); + } + final OptionalBytesReference updateResult = safeAwait( + l -> container.compareAndExchangeRegister(randomPurpose(), key, new BytesArray(data), new BytesArray(updatedData), l) + ); + assertEquals(new BytesArray(data), updateResult.bytesReference()); + + assertEquals(0, requestHandlers.size()); + container.delete(randomPurpose()); + } + private HttpHandler safeHandler(HttpHandler handler) { final HttpHandler loggingHandler = ESMockAPIBasedRepositoryIntegTestCase.wrap(handler, logger); return exchange -> { diff --git a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java index 43724538aabea..81509c7f2183b 100644 --- a/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java +++ b/modules/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreContainerTests.java @@ -17,6 +17,7 @@ import com.google.cloud.storage.StorageBatchResult; import com.google.cloud.storage.StorageException; +import org.elasticsearch.common.BackoffPolicy; import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; @@ -87,7 +88,8 @@ public void testDeleteBlobsIgnoringIfNotExistsThrowsIOException() throws Excepti "repo", storageService, BigArrays.NON_RECYCLING_INSTANCE, - randomIntBetween(1, 8) * 1024 + randomIntBetween(1, 8) * 1024, + BackoffPolicy.noBackoff() ) ) { final BlobContainer container = store.blobContainer(BlobPath.EMPTY); diff --git 
a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml index 4c42ec110a257..df557f9944253 100644 --- a/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-s3/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,2 @@ ALL-UNNAMED: - - network: - actions: - - connect + - outbound_network diff --git a/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml b/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml index f1dc1fc7755ef..081ac21f88864 100644 --- a/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml +++ b/modules/repository-url/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,2 @@ org.apache.httpcomponents.httpclient: - - network: - actions: - - connect # for URLHttpClient + - outbound_network # for URLHttpClient diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java index d825ec0a83f53..0158384b47aa4 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/http/netty4/Netty4IncrementalRequestHandlingIT.java @@ -54,7 +54,6 @@ import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.features.NodeFeature; import org.elasticsearch.http.HttpBodyTracer; -import org.elasticsearch.http.HttpHandlingSettings; import org.elasticsearch.http.HttpServerTransport; import org.elasticsearch.http.HttpTransportSettings; import org.elasticsearch.plugins.ActionPlugin; @@ -93,10 +92,15 @@ @ESIntegTestCase.ClusterScope(numDataNodes = 1) public class Netty4IncrementalRequestHandlingIT extends 
ESNetty4IntegTestCase { + private static final int MAX_CONTENT_LENGTH = ByteSizeUnit.MB.toIntBytes(50); + @Override protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal, otherSettings)); - builder.put(HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), ByteSizeValue.of(50, ByteSizeUnit.MB)); + builder.put( + HttpTransportSettings.SETTING_HTTP_MAX_CONTENT_LENGTH.getKey(), + ByteSizeValue.of(MAX_CONTENT_LENGTH, ByteSizeUnit.BYTES) + ); return builder.build(); } @@ -135,7 +139,7 @@ public void testReceiveAllChunks() throws Exception { var opaqueId = opaqueId(reqNo); // this dataset will be compared with one on server side - var dataSize = randomIntBetween(1024, maxContentLength()); + var dataSize = randomIntBetween(1024, MAX_CONTENT_LENGTH); var sendData = Unpooled.wrappedBuffer(randomByteArrayOfLength(dataSize)); sendData.retain(); ctx.clientChannel.writeAndFlush(fullHttpRequest(opaqueId, sendData)); @@ -243,7 +247,7 @@ public void testServerExceptionMidStream() throws Exception { public void testClientBackpressure() throws Exception { try (var ctx = setupClientCtx()) { var opaqueId = opaqueId(0); - var payloadSize = maxContentLength(); + var payloadSize = MAX_CONTENT_LENGTH; var totalParts = 10; var partSize = payloadSize / totalParts; ctx.clientChannel.writeAndFlush(httpRequest(opaqueId, payloadSize)); @@ -285,7 +289,7 @@ public void test100Continue() throws Exception { try (var ctx = setupClientCtx()) { for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var acceptableContentLength = randomIntBetween(0, maxContentLength()); + var acceptableContentLength = randomIntBetween(0, MAX_CONTENT_LENGTH); // send request header and await 100-continue var req = httpRequest(id, acceptableContentLength); @@ -317,7 +321,7 @@ public void test413TooLargeOnExpect100Continue() throws Exception { try (var ctx = setupClientCtx()) { 
for (int reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var oversized = maxContentLength() + 1; + var oversized = MAX_CONTENT_LENGTH + 1; // send request header and await 413 too large var req = httpRequest(id, oversized); @@ -333,32 +337,28 @@ public void test413TooLargeOnExpect100Continue() throws Exception { } } - // ensures that oversized chunked encoded request has no limits at http layer - // rest handler is responsible for oversized requests - public void testOversizedChunkedEncodingNoLimits() throws Exception { + // ensures that oversized chunked encoded request has maxContentLength limit and returns 413 + public void testOversizedChunkedEncoding() throws Exception { try (var ctx = setupClientCtx()) { - for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { - var id = opaqueId(reqNo); - var contentSize = maxContentLength() + 1; - var content = randomByteArrayOfLength(contentSize); - var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); - var chunkedIs = new ChunkedStream(is); - var httpChunkedIs = new HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); - var req = httpRequest(id, 0); - HttpUtil.setTransferEncodingChunked(req, true); - - ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); - ctx.clientChannel.writeAndFlush(req); - ctx.clientChannel.writeAndFlush(httpChunkedIs); - var handler = ctx.awaitRestChannelAccepted(id); - var consumed = handler.readAllBytes(); - assertEquals(contentSize, consumed); - handler.sendResponse(new RestResponse(RestStatus.OK, "")); - - var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); - assertEquals(HttpResponseStatus.OK, resp.status()); - resp.release(); - } + var id = opaqueId(0); + var contentSize = MAX_CONTENT_LENGTH + 1; + var content = randomByteArrayOfLength(contentSize); + var is = new ByteBufInputStream(Unpooled.wrappedBuffer(content)); + var chunkedIs = new ChunkedStream(is); + var httpChunkedIs = new 
HttpChunkedInput(chunkedIs, LastHttpContent.EMPTY_LAST_CONTENT); + var req = httpRequest(id, 0); + HttpUtil.setTransferEncodingChunked(req, true); + + ctx.clientChannel.pipeline().addLast(new ChunkedWriteHandler()); + ctx.clientChannel.writeAndFlush(req); + ctx.clientChannel.writeAndFlush(httpChunkedIs); + var handler = ctx.awaitRestChannelAccepted(id); + var consumed = handler.readAllBytes(); + assertTrue(consumed <= MAX_CONTENT_LENGTH); + + var resp = (FullHttpResponse) safePoll(ctx.clientRespQueue); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + resp.release(); } } @@ -369,7 +369,7 @@ public void testBadRequestReleaseQueuedChunks() throws Exception { try (var ctx = setupClientCtx()) { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); var req = httpRequest(id, contentSize); var content = randomContent(contentSize, true); @@ -405,7 +405,7 @@ public void testHttpClientStats() throws Exception { for (var reqNo = 0; reqNo < randomIntBetween(2, 10); reqNo++) { var id = opaqueId(reqNo); - var contentSize = randomIntBetween(0, maxContentLength()); + var contentSize = randomIntBetween(0, MAX_CONTENT_LENGTH); totalBytesSent += contentSize; ctx.clientChannel.writeAndFlush(httpRequest(id, contentSize)); ctx.clientChannel.writeAndFlush(randomContent(contentSize, true)); @@ -485,10 +485,6 @@ private void assertHttpBodyLogging(Function test) throws Exceptio } } - private int maxContentLength() { - return HttpHandlingSettings.fromSettings(internalCluster().getInstance(Settings.class)).maxContentLength(); - } - private String opaqueId(int reqNo) { return getTestName() + "-" + reqNo; } @@ -658,14 +654,22 @@ void sendResponse(RestResponse response) { int readBytes(int bytes) { var consumed = 0; if (recvLast == false) { - while (consumed < bytes) { - stream.next(); - var recvChunk = 
safePoll(recvChunks); - consumed += recvChunk.chunk.length(); - recvChunk.chunk.close(); - if (recvChunk.isLast) { - recvLast = true; - break; + stream.next(); + while (consumed < bytes && streamClosed == false) { + try { + var recvChunk = recvChunks.poll(10, TimeUnit.MILLISECONDS); + if (recvChunk != null) { + consumed += recvChunk.chunk.length(); + recvChunk.chunk.close(); + if (recvChunk.isLast) { + recvLast = true; + break; + } + stream.next(); + } + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw new AssertionError(e); } } } diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java index 021ce09e0ed8e..0294b4626496c 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpAggregator.java @@ -11,13 +11,10 @@ import io.netty.channel.ChannelHandlerContext; import io.netty.handler.codec.http.FullHttpRequest; -import io.netty.handler.codec.http.FullHttpResponse; -import io.netty.handler.codec.http.HttpContent; import io.netty.handler.codec.http.HttpObject; import io.netty.handler.codec.http.HttpObjectAggregator; import io.netty.handler.codec.http.HttpRequest; -import io.netty.handler.codec.http.HttpResponseStatus; -import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpRequestDecoder; import org.elasticsearch.http.HttpPreRequest; import org.elasticsearch.http.netty4.internal.HttpHeadersAuthenticatorUtils; @@ -27,18 +24,19 @@ /** * A wrapper around {@link HttpObjectAggregator}. Provides optional content aggregation based on * predicate. {@link HttpObjectAggregator} also handles Expect: 100-continue and oversized content. - * Unfortunately, Netty does not provide handlers for oversized messages beyond HttpObjectAggregator. 
+ * Provides content size handling for non-aggregated requests too. */ public class Netty4HttpAggregator extends HttpObjectAggregator { private static final Predicate IGNORE_TEST = (req) -> req.uri().startsWith("/_test/request-stream") == false; private final Predicate decider; + private final Netty4HttpContentSizeHandler streamContentSizeHandler; private boolean aggregating = true; - private boolean ignoreContentAfterContinueResponse = false; - public Netty4HttpAggregator(int maxContentLength, Predicate decider) { + public Netty4HttpAggregator(int maxContentLength, Predicate decider, HttpRequestDecoder decoder) { super(maxContentLength); this.decider = decider; + this.streamContentSizeHandler = new Netty4HttpContentSizeHandler(decoder, maxContentLength); } @Override @@ -51,34 +49,7 @@ public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception if (aggregating || msg instanceof FullHttpRequest) { super.channelRead(ctx, msg); } else { - handle(ctx, (HttpObject) msg); - } - } - - private void handle(ChannelHandlerContext ctx, HttpObject msg) { - if (msg instanceof HttpRequest request) { - var continueResponse = newContinueResponse(request, maxContentLength(), ctx.pipeline()); - if (continueResponse != null) { - // there are 3 responses expected: 100, 413, 417 - // on 100 we pass request further and reply to client to continue - // on 413/417 we ignore following content - ctx.writeAndFlush(continueResponse); - var resp = (FullHttpResponse) continueResponse; - if (resp.status() != HttpResponseStatus.CONTINUE) { - ignoreContentAfterContinueResponse = true; - return; - } - HttpUtil.set100ContinueExpected(request, false); - } - ignoreContentAfterContinueResponse = false; - ctx.fireChannelRead(msg); - } else { - var httpContent = (HttpContent) msg; - if (ignoreContentAfterContinueResponse) { - httpContent.release(); - } else { - ctx.fireChannelRead(msg); - } + streamContentSizeHandler.channelRead(ctx, msg); } } } diff --git 
a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java new file mode 100644 index 0000000000000..2b322fefa1262 --- /dev/null +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandler.java @@ -0,0 +1,171 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.Unpooled; +import io.netty.channel.ChannelFutureListener; +import io.netty.channel.ChannelHandlerContext; +import io.netty.channel.ChannelInboundHandlerAdapter; +import io.netty.handler.codec.http.DefaultFullHttpResponse; +import io.netty.handler.codec.http.DefaultHttpHeaders; +import io.netty.handler.codec.http.EmptyHttpHeaders; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; + +import org.elasticsearch.core.SuppressForbidden; + +import static io.netty.handler.codec.http.HttpHeaderNames.CONNECTION; +import static 
io.netty.handler.codec.http.HttpHeaderNames.CONTENT_LENGTH; + +/** + * Provides handling for 'Expect' header and content size. Implements HTTP1.1 spec. + * Allows {@code Expect: 100-continue} header only. Other 'Expect' headers will be rejected with + * {@code 417 Expectation Failed} reason. + *
+ * Replies {@code 100 Continue} to requests with allowed maxContentLength. + *
+ * Replies {@code 413 Request Entity Too Large} when content size exceeds maxContentLength. + * + * Channel can be reused for requests with "Expect:100-Continue" header that exceed allowed content length, + * as long as request does not include content. If oversized request already contains content then + * we cannot safely proceed and connection will be closed. + *

+ * TODO: move to RestController to allow content limits per RestHandler. + * Ideally we should be able to handle Continue and oversized request in the RestController. + *
    + *
  • + * 100 Continue is interim response, means RestChannel will send 2 responses for a single request. See + * rfc9110.html#status.100 + *
  • + *
  • + * RestChannel should be able to close underlying HTTP channel connection. + *
  • + *
+ */ +@SuppressForbidden(reason = "use of default ChannelFutureListener's CLOSE and CLOSE_ON_FAILURE") +public class Netty4HttpContentSizeHandler extends ChannelInboundHandlerAdapter { + + // copied from netty's HttpObjectAggregator + static final FullHttpResponse CONTINUE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.CONTINUE, + Unpooled.EMPTY_BUFFER + ); + static final FullHttpResponse EXPECTATION_FAILED_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.EXPECTATION_FAILED, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE_CLOSE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0).add(CONNECTION, HttpHeaderValues.CLOSE), + EmptyHttpHeaders.INSTANCE + ); + static final FullHttpResponse TOO_LARGE = new DefaultFullHttpResponse( + HttpVersion.HTTP_1_1, + HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, + Unpooled.EMPTY_BUFFER, + new DefaultHttpHeaders().add(CONTENT_LENGTH, 0), + EmptyHttpHeaders.INSTANCE + ); + + private final int maxContentLength; + private final HttpRequestDecoder decoder; // need to reset decoder after sending 413 + private int currentContentLength; // chunked encoding does not provide content length, need to track actual length + private boolean contentExpected; + + public Netty4HttpContentSizeHandler(HttpRequestDecoder decoder, int maxContentLength) { + this.maxContentLength = maxContentLength; + this.decoder = decoder; + } + + @Override + public void channelRead(ChannelHandlerContext ctx, Object msg) { + assert msg instanceof HttpObject; + if (msg instanceof HttpRequest request) { + handleRequest(ctx, request); + } else { + handleContent(ctx, (HttpContent) msg); + } + } + + private void handleRequest(ChannelHandlerContext ctx, 
HttpRequest request) { + contentExpected = false; + if (request.decoderResult().isFailure()) { + ctx.fireChannelRead(request); + return; + } + + final var expectValue = request.headers().get(HttpHeaderNames.EXPECT); + boolean isContinueExpected = false; + // Only "Expect: 100-Continue" header is supported + if (expectValue != null) { + if (HttpHeaderValues.CONTINUE.toString().equalsIgnoreCase(expectValue)) { + isContinueExpected = true; + } else { + ctx.writeAndFlush(EXPECTATION_FAILED_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + return; + } + } + + boolean isOversized = HttpUtil.getContentLength(request, -1) > maxContentLength; + if (isOversized) { + if (isContinueExpected) { + // Client is allowed to send content without waiting for Continue. + // See https://www.rfc-editor.org/rfc/rfc9110.html#section-10.1.1-11.3 + // this content will result in HttpRequestDecoder failure and send downstream + decoder.reset(); + ctx.writeAndFlush(TOO_LARGE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE_ON_FAILURE); + } else { + // Client is sending oversized content, we cannot safely take it. Closing channel. 
+ ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + } + } else { + contentExpected = true; + currentContentLength = 0; + if (isContinueExpected) { + ctx.writeAndFlush(CONTINUE.retainedDuplicate()); + HttpUtil.set100ContinueExpected(request, false); + } + ctx.fireChannelRead(request); + } + } + + private void handleContent(ChannelHandlerContext ctx, HttpContent msg) { + if (contentExpected) { + currentContentLength += msg.content().readableBytes(); + if (currentContentLength > maxContentLength) { + msg.release(); + ctx.writeAndFlush(TOO_LARGE_CLOSE.retainedDuplicate()).addListener(ChannelFutureListener.CLOSE); + } else { + ctx.fireChannelRead(msg); + } + } else { + msg.release(); + if (msg != LastHttpContent.EMPTY_LAST_CONTENT) { + ctx.close(); // there is no reliable recovery from unexpected content, closing channel + } + } + } + +} diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java index 0902e707b706e..88b4518c8de89 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpRequestBodyStream.java @@ -75,13 +75,15 @@ public void addTracingHandler(ChunkHandler chunkHandler) { @Override public void next() { - assert closing == false : "cannot request next chunk on closing stream"; assert handler != null : "handler must be set before requesting next chunk"; requestContext = threadContext.newStoredContext(); channel.eventLoop().submit(() -> { activityTracker.startActivity(); requested = true; try { + if (closing) { + return; + } if (buf == null) { channel.read(); } else { diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java 
b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java index 36c860f1fb90b..9ffa4b479be17 100644 --- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java +++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java @@ -381,7 +381,8 @@ protected HttpMessage createMessage(String[] initialLine) throws Exception { handlingSettings.maxContentLength(), httpPreRequest -> enabled.get() == false || ((httpPreRequest.rawPath().endsWith("/_bulk") == false) - || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")) + || httpPreRequest.rawPath().startsWith("/_xpack/monitoring/_bulk")), + decoder ); aggregator.setMaxCumulationBufferComponents(transport.maxCompositeBufferComponents); ch.pipeline() diff --git a/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..eb772a06423a3 --- /dev/null +++ b/modules/transport-netty4/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,6 @@ +io.netty.transport: + - inbound_network + - outbound_network +io.netty.common: + - inbound_network + - outbound_network diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java new file mode 100644 index 0000000000000..3f8fe0075689f --- /dev/null +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/http/netty4/Netty4HttpContentSizeHandlerTests.java @@ -0,0 +1,238 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.http.netty4; + +import io.netty.buffer.ByteBuf; +import io.netty.buffer.Unpooled; +import io.netty.channel.embedded.EmbeddedChannel; +import io.netty.handler.codec.http.DefaultHttpContent; +import io.netty.handler.codec.http.DefaultHttpRequest; +import io.netty.handler.codec.http.DefaultLastHttpContent; +import io.netty.handler.codec.http.FullHttpResponse; +import io.netty.handler.codec.http.HttpContent; +import io.netty.handler.codec.http.HttpHeaderNames; +import io.netty.handler.codec.http.HttpHeaderValues; +import io.netty.handler.codec.http.HttpMethod; +import io.netty.handler.codec.http.HttpObject; +import io.netty.handler.codec.http.HttpRequest; +import io.netty.handler.codec.http.HttpRequestDecoder; +import io.netty.handler.codec.http.HttpRequestEncoder; +import io.netty.handler.codec.http.HttpResponseStatus; +import io.netty.handler.codec.http.HttpUtil; +import io.netty.handler.codec.http.HttpVersion; +import io.netty.handler.codec.http.LastHttpContent; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; + +public class Netty4HttpContentSizeHandlerTests extends ESTestCase { + + private static final int MAX_CONTENT_LENGTH = 1024; + private static final int OVERSIZED_LENGTH = MAX_CONTENT_LENGTH + 1; + private static final int REPS = 1000; + private EmbeddedChannel channel; + private EmbeddedChannel encoder; // channel to encode HTTP objects into bytes + + private static HttpContent httpContent(int size) { + return new DefaultHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private static LastHttpContent lastHttpContent(int size) { + return new 
DefaultLastHttpContent(Unpooled.wrappedBuffer(randomByteArrayOfLength(size))); + } + + private HttpRequest httpRequest() { + return new DefaultHttpRequest(HttpVersion.HTTP_1_1, HttpMethod.POST, "/"); + } + + // encodes multiple HTTP objects into single ByteBuf + private ByteBuf encode(HttpObject... objs) { + var out = Unpooled.compositeBuffer(); + Arrays.stream(objs).forEach(encoder::writeOutbound); + while (encoder.outboundMessages().isEmpty() == false) { + out.addComponent(true, encoder.readOutbound()); + } + return out; + } + + @Override + public void setUp() throws Exception { + super.setUp(); + var decoder = new HttpRequestDecoder(); + encoder = new EmbeddedChannel(new HttpRequestEncoder()); + channel = new EmbeddedChannel(decoder, new Netty4HttpContentSizeHandler(decoder, MAX_CONTENT_LENGTH)); + } + + /** + * Assert that handler replies 100-continue for acceptable request and pass request further. + */ + public void testContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, channel.readOutbound()); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + assertFalse(HttpUtil.is100ContinueExpected(recvRequest)); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler pass through acceptable request. 
+ */ + public void testWithoutContinue() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + channel.writeInbound(encode(sendRequest)); + assertNull("should not receive response", channel.readOutbound()); + assertNotNull("request should pass", channel.readInbound()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); + assertEquals(LastHttpContent.EMPTY_LAST_CONTENT, channel.readInbound()); + } + } + + /** + * Assert that handler pass through request and content for acceptable request. + */ + public void testContinueWithContent() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, MAX_CONTENT_LENGTH); + var sendContent = lastHttpContent(MAX_CONTENT_LENGTH); + channel.writeInbound(encode(sendRequest, sendContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should send back 100-continue", Netty4HttpContentSizeHandler.CONTINUE, resp); + resp.release(); + var recvRequest = (HttpRequest) channel.readInbound(); + assertNotNull(recvRequest); + var recvContent = (HttpContent) channel.readInbound(); + assertNotNull(recvContent); + assertEquals(MAX_CONTENT_LENGTH, recvContent.content().readableBytes()); + recvContent.release(); + } + } + + /** + * Assert that handler return 417 Expectation Failed and closes channel on request + * with "Expect" header other than "100-Continue". 
+ */ + public void testExpectationFailed() { + var sendRequest = httpRequest(); + sendRequest.headers().set(HttpHeaderNames.EXPECT, randomValueOtherThan(HttpHeaderValues.CONTINUE, ESTestCase::randomIdentifier)); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.EXPECTATION_FAILED, resp.status()); + assertFalse(channel.isOpen()); + resp.release(); + } + + /** + * Assert that handler returns 413 Request Entity Too Large for oversized request + * and does not close channel if following content is not present. + */ + public void testEntityTooLarge() { + for (var i = 0; i < REPS; i++) { + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, LastHttpContent.EMPTY_LAST_CONTENT)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertNull("request should not pass", channel.readInbound()); + assertTrue("should not close channel", channel.isOpen()); + resp.release(); + } + } + + /** + * Mixed load of oversized and normal requests with Exepct:100-Continue. 
+ */ + public void testMixedContent() { + for (int i = 0; i < REPS; i++) { + var isOversized = randomBoolean(); + var sendRequest = httpRequest(); + HttpUtil.set100ContinueExpected(sendRequest, true); + if (isOversized) { + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + channel.writeInbound(encode(LastHttpContent.EMPTY_LAST_CONTENT)); // terminate + assertNull(channel.readInbound()); + resp.release(); + } else { + var normalSize = between(1, MAX_CONTENT_LENGTH); + HttpUtil.setContentLength(sendRequest, normalSize); + channel.writeInbound(encode(sendRequest)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.CONTINUE, resp.status()); + resp.release(); + var sendContent = lastHttpContent(normalSize); + channel.writeInbound(encode(sendContent)); + var recvRequest = (HttpRequest) channel.readInbound(); + var recvContent = (LastHttpContent) channel.readInbound(); + assertEquals("content length header should match", normalSize, HttpUtil.getContentLength(recvRequest)); + assertFalse("should remove expect header", HttpUtil.is100ContinueExpected(recvRequest)); + assertEquals("actual content size should match", normalSize, recvContent.content().readableBytes()); + recvContent.release(); + } + } + } + + /** + * Assert that handler returns 413 Request Entity Too Large and close channel for + * oversized request with content. 
+ */ + public void testEntityTooLargeWithContentWithoutExpect() { + var sendRequest = httpRequest(); + HttpUtil.setContentLength(sendRequest, OVERSIZED_LENGTH); + var unexpectedContent = lastHttpContent(OVERSIZED_LENGTH); + channel.writeInbound(encode(sendRequest, unexpectedContent)); + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals(HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertFalse(channel.isOpen()); + resp.release(); + } + + /** + * Assert that handler return 413 Request Entity Too Large and closes channel for oversized + * requests with chunked content. + */ + public void testEntityTooLargeWithChunkedContent() { + var sendRequest = httpRequest(); + HttpUtil.setTransferEncodingChunked(sendRequest, true); + channel.writeInbound(encode(sendRequest)); + assertTrue("request should pass", channel.readInbound() instanceof HttpRequest); + + int contentBytesSent = 0; + do { + var thisPartSize = between(1, MAX_CONTENT_LENGTH * 2); + channel.writeInbound(encode(httpContent(thisPartSize))); + contentBytesSent += thisPartSize; + + if (contentBytesSent <= MAX_CONTENT_LENGTH) { + ((HttpContent) channel.readInbound()).release(); + } else { + break; + } + } while (true); + + var resp = (FullHttpResponse) channel.readOutbound(); + assertEquals("should respond with 413", HttpResponseStatus.REQUEST_ENTITY_TOO_LARGE, resp.status()); + assertFalse("should close channel", channel.isOpen()); + resp.release(); + } + +} diff --git a/muted-tests.yml b/muted-tests.yml index d26a8350b3873..2a343fa14c58c 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -140,9 +140,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_reset/Test reset running transform} issue: https://github.com/elastic/elasticsearch/issues/117473 -- class: org.elasticsearch.search.ccs.CrossClusterIT - method: testCancel - issue: https://github.com/elastic/elasticsearch/issues/108061 - class: 
org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT method: test {p0=search.highlight/50_synthetic_source/text multi unified from vectors} issue: https://github.com/elastic/elasticsearch/issues/117815 @@ -181,9 +178,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/118374 - class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT issue: https://github.com/elastic/elasticsearch/issues/118238 -- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests - method: testInvalidJSON - issue: https://github.com/elastic/elasticsearch/issues/116521 - class: org.elasticsearch.xpack.ccr.rest.ShardChangesRestIT method: testShardChangesNoOperation issue: https://github.com/elastic/elasticsearch/issues/118800 @@ -195,9 +189,6 @@ tests: issue: https://github.com/elastic/elasticsearch/issues/118914 - class: org.elasticsearch.xpack.security.authc.ldap.ActiveDirectoryRunAsIT issue: https://github.com/elastic/elasticsearch/issues/115727 -- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT - method: test {yaml=reference/search/search-your-data/retrievers-examples/line_98} - issue: https://github.com/elastic/elasticsearch/issues/119155 - class: org.elasticsearch.xpack.esql.action.EsqlNodeFailureIT method: testFailureLoadingFields issue: https://github.com/elastic/elasticsearch/issues/118000 @@ -223,9 +214,6 @@ tests: - class: org.elasticsearch.xpack.test.rest.XPackRestIT method: test {p0=transform/transforms_unattended/Test unattended put and start} issue: https://github.com/elastic/elasticsearch/issues/120019 -- class: org.elasticsearch.index.mapper.IntervalThrottlerTests - method: testThrottling - issue: https://github.com/elastic/elasticsearch/issues/120023 - class: org.elasticsearch.xpack.ilm.actions.SearchableSnapshotActionIT method: testUpdatePolicyToAddPhasesYieldsInvalidActionsToBeSkipped issue: https://github.com/elastic/elasticsearch/issues/118406 @@ -241,6 +229,28 @@ tests: - class: 
org.elasticsearch.xpack.migrate.action.ReindexDatastreamIndexTransportActionIT method: testTsdbStartEndSet issue: https://github.com/elastic/elasticsearch/issues/120314 +- class: org.elasticsearch.xpack.test.rest.XPackRestIT + method: test {p0=snapshot/10_basic/Failed to snapshot indices with synthetic source} + issue: https://github.com/elastic/elasticsearch/issues/120332 +- class: org.elasticsearch.xpack.ccr.FollowIndexSecurityIT + method: testCleanShardFollowTaskAfterDeleteFollower + issue: https://github.com/elastic/elasticsearch/issues/120339 +- class: org.elasticsearch.search.ccs.CrossClusterIT + method: testCancel + issue: https://github.com/elastic/elasticsearch/issues/108061 +- class: org.elasticsearch.xpack.logsdb.seqno.RetentionLeaseRestIT + issue: https://github.com/elastic/elasticsearch/issues/120434 +- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedIT + method: testCheckActionWithPolicyPass {pathPrefix=allowed actionName=create_ldap_cert_store} + issue: https://github.com/elastic/elasticsearch/issues/120422 +- class: org.elasticsearch.entitlement.qa.EntitlementsAllowedIT + method: testCheckActionWithPolicyPass {pathPrefix=allowed_nonmodular actionName=create_ldap_cert_store} + issue: https://github.com/elastic/elasticsearch/issues/120423 +- class: org.elasticsearch.reservedstate.service.FileSettingsServiceTests + method: testInvalidJSON + issue: https://github.com/elastic/elasticsearch/issues/120482 +- class: org.elasticsearch.smoketest.DocsClientYamlTestSuiteIT + issue: https://github.com/elastic/elasticsearch/issues/120497 # Examples: # diff --git a/plugins/discovery-ec2/build.gradle b/plugins/discovery-ec2/build.gradle index a8255c1b54517..a824eda6a4620 100644 --- a/plugins/discovery-ec2/build.gradle +++ b/plugins/discovery-ec2/build.gradle @@ -29,6 +29,8 @@ dependencies { api "joda-time:joda-time:2.10.10" javaRestTestImplementation project(':plugins:discovery-ec2') + javaRestTestImplementation 
project(':test:fixtures:aws-fixture-utils') + javaRestTestImplementation project(':test:fixtures:aws-ec2-fixture') javaRestTestImplementation project(':test:fixtures:ec2-imds-fixture') internalClusterTestImplementation project(':test:fixtures:ec2-imds-fixture') @@ -99,11 +101,6 @@ tasks.withType(Test).configureEach { } } -tasks.named("check").configure { - // also execute the QA tests when testing the plugin - dependsOn 'qa:amazon-ec2:check' -} - tasks.named("thirdPartyAudit").configure { ignoreMissingClasses( // classes are missing diff --git a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle b/plugins/discovery-ec2/qa/amazon-ec2/build.gradle deleted file mode 100644 index 5f0fee6636256..0000000000000 --- a/plugins/discovery-ec2/qa/amazon-ec2/build.gradle +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ - -import org.apache.tools.ant.filters.ReplaceTokens -import org.elasticsearch.gradle.internal.test.AntFixture -import org.elasticsearch.gradle.internal.test.RestIntegTestTask -import org.elasticsearch.gradle.internal.test.rest.LegacyYamlRestTestPlugin - -import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE - -apply plugin: 'elasticsearch.legacy-yaml-rest-test' - -dependencies { - yamlRestTestImplementation project(':plugins:discovery-ec2') -} - -restResources { - restApi { - include '_common', 'cluster', 'nodes' - } -} - -final int ec2NumberOfNodes = 3 - -Map expansions = [ - 'expected_nodes': ec2NumberOfNodes -] - -tasks.named("processYamlRestTestResources").configure { - inputs.properties(expansions) - filter("tokens" : expansions.collectEntries {k, v -> [k, v.toString()]} /* must be a map of strings */, ReplaceTokens.class) -} - -// disable default yamlRestTest task, use spezialized ones below -tasks.named("yamlRestTest").configure { enabled = false } - -/* - * Test using various credential providers (see also https://docs.aws.amazon.com/sdk-for-java/v2/developer-guide/credentials.html): - * - Elasticsearch Keystore (secure settings discovery.ec2.access_key and discovery.ec2.secret_key) - * - Java system properties (aws.accessKeyId and aws.secretAccessKey) - * - Environment variables (AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY) - * - ECS container credentials (loaded from ECS if the environment variable AWS_CONTAINER_CREDENTIALS_RELATIVE_URI is set) - * - Instance profile credentials (delivered through the EC2 metadata service) - * - * Notably missing is a test for the default credential profiles file, which is located at ~/.aws/credentials and would at least require a - * custom Java security policy to work. 
- */ -['KeyStore', 'EnvVariables', 'SystemProperties', 'ContainerCredentials', 'InstanceProfile'].forEach { action -> - TaskProvider fixture = tasks.register("ec2Fixture${action}", AntFixture) { - dependsOn project.sourceSets.yamlRestTest.runtimeClasspath - FileCollection cp = project.sourceSets.yamlRestTest.runtimeClasspath - env 'CLASSPATH', "${-> cp.asPath}" - executable = "${buildParams.runtimeJavaHome.get() }/bin/java" - args 'org.elasticsearch.discovery.ec2.AmazonEC2Fixture', baseDir, "${buildDir}/testclusters/yamlRestTest${action}-1/config/unicast_hosts.txt" - } - - def yamlRestTestTask = tasks.register("yamlRestTest${action}", RestIntegTestTask) { - dependsOn fixture - SourceSetContainer sourceSets = project.getExtensions().getByType(SourceSetContainer.class); - SourceSet yamlRestTestSourceSet = sourceSets.getByName(LegacyYamlRestTestPlugin.SOURCE_SET_NAME) - testClassesDirs = yamlRestTestSourceSet.getOutput().getClassesDirs() - classpath = yamlRestTestSourceSet.getRuntimeClasspath() - } - - if(action == 'ContainerCredentials') { - def addressAndPortSource = fixture.get().addressAndPortSource - testClusters.matching { it.name == "yamlRestTestContainerCredentials" }.configureEach { - environment 'AWS_CONTAINER_CREDENTIALS_FULL_URI', - () -> addressAndPortSource.map{ addr -> "http://${addr}/ecs_credentials_endpoint" }.get(), IGNORE_VALUE - } - } - - tasks.named("check").configure { - dependsOn(yamlRestTestTask) - } - def addressAndPortSource = fixture.get().addressAndPortSource - - testClusters.matching { it.name == yamlRestTestTask.name}.configureEach { - numberOfNodes = ec2NumberOfNodes - plugin ':plugins:discovery-ec2' - - setting 'discovery.seed_providers', 'ec2' - setting 'network.host', '_ec2_' - setting 'discovery.ec2.endpoint', { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE - - systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", { "http://${-> addressAndPortSource.get()}" }, IGNORE_VALUE - } -} - -// Extra config for 
KeyStore -testClusters.matching { it.name == "yamlRestTestKeyStore" }.configureEach { - keystore 'discovery.ec2.access_key', 'ec2_integration_test_access_key' - keystore 'discovery.ec2.secret_key', 'ec2_integration_test_secret_key' -} - -// Extra config for EnvVariables -testClusters.matching { it.name == "yamlRestTestEnvVariables" }.configureEach { - environment 'AWS_ACCESS_KEY_ID', 'ec2_integration_test_access_key' - environment 'AWS_SECRET_ACCESS_KEY', 'ec2_integration_test_secret_key' -} - -// Extra config for SystemProperties -testClusters.matching { it.name == "yamlRestTestSystemProperties" }.configureEach { - systemProperty 'aws.accessKeyId', 'ec2_integration_test_access_key' - systemProperty 'aws.secretKey', 'ec2_integration_test_secret_key' -} - -// Extra config for ContainerCredentials -tasks.named("ec2FixtureContainerCredentials").configure { - env 'ACTIVATE_CONTAINER_CREDENTIALS', true -} - -// Extra config for InstanceProfile -tasks.named("ec2FixtureInstanceProfile").configure { - env 'ACTIVATE_INSTANCE_PROFILE', true -} diff --git a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2DiscoveryClientYamlTestSuiteIT.java b/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2DiscoveryClientYamlTestSuiteIT.java deleted file mode 100644 index a7cb6113502e7..0000000000000 --- a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2DiscoveryClientYamlTestSuiteIT.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". - */ - -package org.elasticsearch.discovery.ec2; - -import com.carrotsearch.randomizedtesting.annotations.Name; -import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; - -import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; -import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; - -public class AmazonEC2DiscoveryClientYamlTestSuiteIT extends ESClientYamlSuiteTestCase { - - public AmazonEC2DiscoveryClientYamlTestSuiteIT(@Name("yaml") ClientYamlTestCandidate testCandidate) { - super(testCandidate); - } - - @ParametersFactory - public static Iterable parameters() throws Exception { - return createParameters(); - } -} diff --git a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java b/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java deleted file mode 100644 index 2718580b5ff68..0000000000000 --- a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/java/org/elasticsearch/discovery/ec2/AmazonEC2Fixture.java +++ /dev/null @@ -1,251 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the "Elastic License - * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side - * Public License v 1"; you may not use this file except in compliance with, at - * your election, the "Elastic License 2.0", the "GNU Affero General Public - * License v3.0 only", or the "Server Side Public License, v 1". 
- */ -package org.elasticsearch.discovery.ec2; - -import com.amazonaws.util.DateUtils; - -import org.apache.http.NameValuePair; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.client.utils.URLEncodedUtils; -import org.elasticsearch.core.Booleans; -import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.test.fixture.AbstractHttpFixture; - -import java.io.IOException; -import java.io.StringWriter; -import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.Date; -import java.util.HashMap; -import java.util.Locale; -import java.util.Map; -import java.util.Objects; -import java.util.UUID; -import java.util.concurrent.TimeUnit; - -import javax.xml.XMLConstants; -import javax.xml.stream.XMLOutputFactory; -import javax.xml.stream.XMLStreamWriter; - -import static java.nio.charset.StandardCharsets.UTF_8; - -/** - * {@link AmazonEC2Fixture} is a fixture that emulates an AWS EC2 service. 
- */ -public class AmazonEC2Fixture extends AbstractHttpFixture { - - private static final String IMDSV_2_TOKEN = "imdsv2-token"; - private static final String X_AWS_EC_2_METADATA_TOKEN = "X-aws-ec2-metadata-token"; - - private final Path nodes; - private final boolean instanceProfile; - private final boolean containerCredentials; - - private AmazonEC2Fixture(final String workingDir, final String nodesUriPath, boolean instanceProfile, boolean containerCredentials) { - super(workingDir); - this.nodes = toPath(Objects.requireNonNull(nodesUriPath)); - this.instanceProfile = instanceProfile; - this.containerCredentials = containerCredentials; - } - - public static void main(String[] args) throws Exception { - if (args == null || args.length != 2) { - throw new IllegalArgumentException("AmazonEC2Fixture "); - } - - boolean instanceProfile = Booleans.parseBoolean(System.getenv("ACTIVATE_INSTANCE_PROFILE"), false); - boolean containerCredentials = Booleans.parseBoolean(System.getenv("ACTIVATE_CONTAINER_CREDENTIALS"), false); - - final AmazonEC2Fixture fixture = new AmazonEC2Fixture(args[0], args[1], instanceProfile, containerCredentials); - fixture.listen(); - } - - @Override - protected Response handle(final Request request) throws IOException { - if ("/".equals(request.getPath()) && (HttpPost.METHOD_NAME.equals(request.getMethod()))) { - final String userAgent = request.getHeader("User-Agent"); - if (userAgent != null && userAgent.startsWith("aws-sdk-java")) { - - final String auth = request.getHeader("Authorization"); - if (auth == null || auth.contains("ec2_integration_test_access_key") == false) { - throw new IllegalArgumentException("wrong access key: " + auth); - } - - // Simulate an EC2 DescribeInstancesResponse - byte[] responseBody = EMPTY_BYTE; - for (NameValuePair parse : URLEncodedUtils.parse(new String(request.getBody(), UTF_8), UTF_8)) { - if ("Action".equals(parse.getName())) { - responseBody = generateDescribeInstancesResponse(); - break; - } - } - return 
new Response(RestStatus.OK.getStatus(), contentType("text/xml; charset=UTF-8"), responseBody); - } - } - if ("/latest/meta-data/local-ipv4".equals(request.getPath()) - && (HttpGet.METHOD_NAME.equals(request.getMethod())) - && request.getHeaders().getOrDefault(X_AWS_EC_2_METADATA_TOKEN, "").equals(IMDSV_2_TOKEN)) { - return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, "127.0.0.1".getBytes(UTF_8)); - } - - if (instanceProfile - && "/latest/meta-data/iam/security-credentials/".equals(request.getPath()) - && HttpGet.METHOD_NAME.equals(request.getMethod()) - && request.getHeaders().getOrDefault(X_AWS_EC_2_METADATA_TOKEN, "").equals(IMDSV_2_TOKEN)) { - final Map headers = new HashMap<>(contentType("text/plain")); - return new Response(RestStatus.OK.getStatus(), headers, "my_iam_profile".getBytes(UTF_8)); - } - - if ("/latest/api/token".equals(request.getPath()) && HttpPut.METHOD_NAME.equals(request.getMethod())) { - return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, IMDSV_2_TOKEN.getBytes(StandardCharsets.UTF_8)); - } - - if ((containerCredentials - && "/ecs_credentials_endpoint".equals(request.getPath()) - && HttpGet.METHOD_NAME.equals(request.getMethod())) - || ("/latest/meta-data/iam/security-credentials/my_iam_profile".equals(request.getPath()) - && HttpGet.METHOD_NAME.equals(request.getMethod()) - && request.getHeaders().getOrDefault(X_AWS_EC_2_METADATA_TOKEN, "").equals(IMDSV_2_TOKEN))) { - final Date expiration = new Date(new Date().getTime() + TimeUnit.DAYS.toMillis(1)); - final String response = String.format(Locale.ROOT, """ - { - "AccessKeyId": "ec2_integration_test_access_key", - "Expiration": "%s", - "RoleArn": "test", - "SecretAccessKey": "ec2_integration_test_secret_key", - "Token": "test" - }""", DateUtils.formatISO8601Date(expiration)); - - final Map headers = new HashMap<>(contentType("application/json")); - return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); - } - - return null; - 
} - - /** - * Generates a XML response that describe the EC2 instances - */ - private byte[] generateDescribeInstancesResponse() { - final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); - xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true); - - final StringWriter out = new StringWriter(); - XMLStreamWriter sw; - try { - sw = xmlOutputFactory.createXMLStreamWriter(out); - sw.writeStartDocument(); - - String namespace = "http://ec2.amazonaws.com/doc/2013-02-01/"; - sw.setDefaultNamespace(namespace); - sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "DescribeInstancesResponse", namespace); - { - sw.writeStartElement("requestId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("reservationSet"); - { - if (Files.exists(nodes)) { - for (String address : Files.readAllLines(nodes)) { - - sw.writeStartElement("item"); - { - sw.writeStartElement("reservationId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("instancesSet"); - { - sw.writeStartElement("item"); - { - sw.writeStartElement("instanceId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("imageId"); - sw.writeCharacters(UUID.randomUUID().toString()); - sw.writeEndElement(); - - sw.writeStartElement("instanceState"); - { - sw.writeStartElement("code"); - sw.writeCharacters("16"); - sw.writeEndElement(); - - sw.writeStartElement("name"); - sw.writeCharacters("running"); - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeStartElement("privateDnsName"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("dnsName"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("instanceType"); - sw.writeCharacters("m1.medium"); - sw.writeEndElement(); - - sw.writeStartElement("placement"); - { - sw.writeStartElement("availabilityZone"); - 
sw.writeCharacters("use-east-1e"); - sw.writeEndElement(); - - sw.writeEmptyElement("groupName"); - - sw.writeStartElement("tenancy"); - sw.writeCharacters("default"); - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeStartElement("privateIpAddress"); - sw.writeCharacters(address); - sw.writeEndElement(); - - sw.writeStartElement("ipAddress"); - sw.writeCharacters(address); - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - } - sw.writeEndElement(); - } - } - sw.writeEndElement(); - } - sw.writeEndElement(); - - sw.writeEndDocument(); - sw.flush(); - } - } catch (Exception e) { - throw new RuntimeException(e); - } - return out.toString().getBytes(UTF_8); - } - - @SuppressForbidden(reason = "Paths#get is fine - we don't have environment here") - private static Path toPath(final String dir) { - return Paths.get(dir); - } -} diff --git a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/resources/rest-api-spec/test/discovery_ec2/10_basic.yml b/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/resources/rest-api-spec/test/discovery_ec2/10_basic.yml deleted file mode 100644 index 48afe1a334bf8..0000000000000 --- a/plugins/discovery-ec2/qa/amazon-ec2/src/yamlRestTest/resources/rest-api-spec/test/discovery_ec2/10_basic.yml +++ /dev/null @@ -1,15 +0,0 @@ -# Integration tests for discovery-ec2 -setup: - - do: - cluster.health: - wait_for_status: green - wait_for_nodes: @expected_nodes@ - ---- -"All nodes are correctly discovered": - - - do: - nodes.info: - metric: [ transport ] - - - match: { _nodes.total: @expected_nodes@ } diff --git a/plugins/discovery-ec2/qa/build.gradle b/plugins/discovery-ec2/qa/build.gradle deleted file mode 100644 index 0aed6df883825..0000000000000 --- a/plugins/discovery-ec2/qa/build.gradle +++ /dev/null @@ -1 +0,0 @@ -group = "${group}.plugins.discovery-ec2.qa" diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2ClusterFormationTestCase.java 
b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2ClusterFormationTestCase.java new file mode 100644 index 0000000000000..12737515b9314 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2ClusterFormationTestCase.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.discovery.ec2; + +import org.elasticsearch.cluster.routing.Murmur3HashFunction; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.LogType; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.BufferedReader; +import java.io.IOException; +import java.io.InputStreamReader; +import java.nio.charset.StandardCharsets; +import java.util.HashSet; +import java.util.regex.Pattern; + +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.hasItem; + +public abstract class DiscoveryEc2ClusterFormationTestCase extends ESRestTestCase { + + protected abstract ElasticsearchCluster getCluster(); + + @Override + protected String getTestRestCluster() { + return getCluster().getHttpAddresses(); + } + + public void testClusterFormation() throws IOException { + final var cluster = getCluster(); + final var expectedAddresses = new HashSet<>(cluster.getAvailableTransportEndpoints()); + final var addressesPattern = Pattern.compile(".* using dynamic transport addresses \\[(.*)]"); + + assertThat(cluster.getNumNodes(), greaterThan(1)); // multiple node cluster means 
discovery must have worked + for (int nodeIndex = 0; nodeIndex < cluster.getNumNodes(); nodeIndex++) { + try ( + var logStream = cluster.getNodeLog(nodeIndex, LogType.SERVER); + var logReader = new InputStreamReader(logStream, StandardCharsets.UTF_8); + var bufReader = new BufferedReader(logReader) + ) { + do { + final var line = bufReader.readLine(); + if (line == null) { + break; + } + + final var matcher = addressesPattern.matcher(line); + if (matcher.matches()) { + for (final var address : matcher.group(1).split(", ")) { + // TODO also add some nodes to the DescribeInstances output which are filtered out, and verify that we do not + // see their addresses here + assertThat(expectedAddresses, hasItem(address)); + } + } + } while (true); + } + } + } + + protected static String getIdentifierPrefix(String testSuiteName) { + return testSuiteName + "-" + Integer.toString(Murmur3HashFunction.hash(testSuiteName + System.getProperty("tests.seed")), 16) + "-"; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EcsCredentialsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EcsCredentialsIT.java new file mode 100644 index 0000000000000..5c6368fe0db67 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EcsCredentialsIT.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.DynamicAwsCredentials; +import fixture.aws.ec2.AwsEc2HttpFixture; +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; +import java.util.Set; + +public class DiscoveryEc2EcsCredentialsIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); + + private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2EcsCredentialsIT"); + private static final String REGION = PREFIX + "-region"; + private static final String CREDENTIALS_ENDPOINT = "/ecs_credentials_endpoint_" + PREFIX; + + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V1).newCredentialsConsumer(dynamicCredentials::addValidCredentials) + .alternativeCredentialsEndpoints(Set.of(CREDENTIALS_ENDPOINT)) + ); + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + dynamicCredentials::isAuthorized, + DiscoveryEc2EcsCredentialsIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .environment("AWS_CONTAINER_CREDENTIALS_FULL_URI", () -> ec2ImdsHttpFixture.getAddress() + CREDENTIALS_ENDPOINT) + .environment("AWS_REGION", REGION) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EnvironmentVariableCredentialsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EnvironmentVariableCredentialsIT.java new file mode 100644 index 0000000000000..09af39f1af588 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2EnvironmentVariableCredentialsIT.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.ec2.AwsEc2HttpFixture; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; + +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; + +public class DiscoveryEc2EnvironmentVariableCredentialsIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2EnvironmentVariableCredentialsIT"); + private static final String REGION = PREFIX + "-region"; + private static final String ACCESS_KEY = PREFIX + "-access-key"; + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + fixedAccessKey(ACCESS_KEY), + DiscoveryEc2EnvironmentVariableCredentialsIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .environment("AWS_REGION", REGION) + .environment("AWS_ACCESS_KEY_ID", ACCESS_KEY) + .environment("AWS_SECRET_ACCESS_KEY", ESTestCase::randomIdentifier) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2InstanceProfileIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2InstanceProfileIT.java new file mode 100644 index 0000000000000..abd06c7fa8367 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2InstanceProfileIT.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.DynamicAwsCredentials; +import fixture.aws.ec2.AwsEc2HttpFixture; +import fixture.aws.imds.Ec2ImdsHttpFixture; +import fixture.aws.imds.Ec2ImdsServiceBuilder; +import fixture.aws.imds.Ec2ImdsVersion; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; + +public class DiscoveryEc2InstanceProfileIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final DynamicAwsCredentials dynamicCredentials = new DynamicAwsCredentials(); + + private static final Ec2ImdsHttpFixture ec2ImdsHttpFixture = new Ec2ImdsHttpFixture( + new Ec2ImdsServiceBuilder(Ec2ImdsVersion.V2).instanceIdentityDocument( + (builder, params) -> builder.field("region", randomIdentifier()) + ).newCredentialsConsumer(dynamicCredentials::addValidCredentials) + ); + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + dynamicCredentials::isAuthorized, + DiscoveryEc2InstanceProfileIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .systemProperty(Ec2ImdsHttpFixture.ENDPOINT_OVERRIDE_SYSPROP_NAME, ec2ImdsHttpFixture::getAddress) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ImdsHttpFixture).around(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreCredentialsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreCredentialsIT.java new file mode 100644 index 0000000000000..4ae7b48274472 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreCredentialsIT.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.ec2.AwsEc2HttpFixture; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; + +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; + +public class DiscoveryEc2KeystoreCredentialsIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2KeystoreCredentialsIT"); + private static final String REGION = PREFIX + "-region"; + private static final String ACCESS_KEY = PREFIX + "-access-key"; + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + fixedAccessKey(ACCESS_KEY), + DiscoveryEc2KeystoreCredentialsIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .environment("AWS_REGION", REGION) + .keystore("discovery.ec2.access_key", ACCESS_KEY) + .keystore("discovery.ec2.secret_key", ESTestCase::randomIdentifier) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreSessionCredentialsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreSessionCredentialsIT.java new file mode 100644 index 0000000000000..779cbb922fb79 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2KeystoreSessionCredentialsIT.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.ec2.AwsEc2HttpFixture; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; + +import static fixture.aws.AwsCredentialsUtils.fixedAccessKeyAndToken; + +public class DiscoveryEc2KeystoreSessionCredentialsIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2KeystoreSessionCredentialsIT"); + private static final String REGION = PREFIX + "-region"; + private static final String ACCESS_KEY = PREFIX + "-access-key"; + private static final String SESSION_TOKEN = PREFIX + "-session-token"; + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + fixedAccessKeyAndToken(ACCESS_KEY, SESSION_TOKEN), + DiscoveryEc2KeystoreSessionCredentialsIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .environment("AWS_REGION", REGION) + .keystore("discovery.ec2.access_key", ACCESS_KEY) + .keystore("discovery.ec2.secret_key", ESTestCase::randomIdentifier) + .keystore("discovery.ec2.session_token", SESSION_TOKEN) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SystemPropertyCredentialsIT.java b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SystemPropertyCredentialsIT.java new file mode 100644 index 0000000000000..babec7b7603e0 --- /dev/null +++ b/plugins/discovery-ec2/src/javaRestTest/java/org/elasticsearch/discovery/ec2/DiscoveryEc2SystemPropertyCredentialsIT.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.discovery.ec2; + +import fixture.aws.ec2.AwsEc2HttpFixture; + +import org.elasticsearch.discovery.DiscoveryModule; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; + +import java.util.List; + +import static fixture.aws.AwsCredentialsUtils.fixedAccessKey; + +public class DiscoveryEc2SystemPropertyCredentialsIT extends DiscoveryEc2ClusterFormationTestCase { + + private static final String PREFIX = getIdentifierPrefix("DiscoveryEc2SystemPropertyCredentialsIT"); + private static final String REGION = PREFIX + "-region"; + private static final String ACCESS_KEY = PREFIX + "-access-key"; + + private static final AwsEc2HttpFixture ec2ApiFixture = new AwsEc2HttpFixture( + fixedAccessKey(ACCESS_KEY), + DiscoveryEc2SystemPropertyCredentialsIT::getAvailableTransportEndpoints + ); + + private static final ElasticsearchCluster cluster = ElasticsearchCluster.local() + .nodes(2) + .plugin("discovery-ec2") + .setting(DiscoveryModule.DISCOVERY_SEED_PROVIDERS_SETTING.getKey(), Ec2DiscoveryPlugin.EC2_SEED_HOSTS_PROVIDER_NAME) + .setting("logger." 
+ AwsEc2SeedHostsProvider.class.getCanonicalName(), "DEBUG") + .setting(Ec2ClientSettings.ENDPOINT_SETTING.getKey(), ec2ApiFixture::getAddress) + .environment("AWS_REGION", REGION) + .systemProperty("aws.accessKeyId", ACCESS_KEY) + .systemProperty("aws.secretKey", ESTestCase::randomIdentifier) + .build(); + + private static List getAvailableTransportEndpoints() { + return cluster.getAvailableTransportEndpoints(); + } + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(ec2ApiFixture).around(cluster); + + @Override + protected ElasticsearchCluster getCluster() { + return cluster; + } +} diff --git a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java index c089b9dfcc0c9..6d50719e62ca9 100644 --- a/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java +++ b/plugins/discovery-ec2/src/main/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryPlugin.java @@ -37,7 +37,6 @@ import java.security.AccessController; import java.security.PrivilegedAction; import java.util.Arrays; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.Supplier; @@ -47,7 +46,7 @@ public class Ec2DiscoveryPlugin extends Plugin implements DiscoveryPlugin, ReloadablePlugin { private static final Logger logger = LogManager.getLogger(Ec2DiscoveryPlugin.class); - public static final String EC2 = "ec2"; + public static final String EC2_SEED_HOSTS_PROVIDER_NAME = "ec2"; static { SpecialPermission.check(); @@ -91,7 +90,7 @@ public NetworkService.CustomNameResolver getCustomNameResolver(Settings _setting @Override public Map> getSeedHostProviders(TransportService transportService, NetworkService networkService) { - return Collections.singletonMap(EC2, () -> new AwsEc2SeedHostsProvider(settings, transportService, ec2Service)); + return Map.of(EC2_SEED_HOSTS_PROVIDER_NAME, () 
-> new AwsEc2SeedHostsProvider(settings, transportService, ec2Service)); } @Override diff --git a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml index 4c42ec110a257..df557f9944253 100644 --- a/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml +++ b/plugins/discovery-ec2/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,2 @@ ALL-UNNAMED: - - network: - actions: - - connect + - outbound_network diff --git a/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml new file mode 100644 index 0000000000000..b5020dc1b7468 --- /dev/null +++ b/plugins/repository-hdfs/src/main/plugin-metadata/entitlement-policy.yaml @@ -0,0 +1,5 @@ +ALL-UNNAMED: + - outbound_network + - write_system_properties: + properties: + - hadoop.home.dir diff --git a/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AddIndexBlockRollingUpgradeIT.java b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AddIndexBlockRollingUpgradeIT.java new file mode 100644 index 0000000000000..f8d185dbaabdd --- /dev/null +++ b/qa/rolling-upgrade/src/javaRestTest/java/org/elasticsearch/upgrades/AddIndexBlockRollingUpgradeIT.java @@ -0,0 +1,70 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.upgrades; + +import io.netty.handler.codec.http.HttpMethod; + +import com.carrotsearch.randomizedtesting.annotations.Name; + +import org.elasticsearch.TransportVersions; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.Map; + +public class AddIndexBlockRollingUpgradeIT extends AbstractRollingUpgradeTestCase { + + private static final String INDEX_NAME = "test_add_block"; + + public AddIndexBlockRollingUpgradeIT(@Name("upgradedNodes") int upgradedNodes) { + super(upgradedNodes); + } + + public void testAddBlock() throws Exception { + if (isOldCluster()) { + createIndex(INDEX_NAME); + } else if (isMixedCluster()) { + blockWrites(); + // this is used both for upgrading from 9.0.0 to current and from 8.18 to current. + if (minimumTransportVersion().before(TransportVersions.ADD_INDEX_BLOCK_TWO_PHASE)) { + assertNull(verifiedSettingValue()); + } else { + assertThat(verifiedSettingValue(), Matchers.equalTo("true")); + } + } else { + assertTrue(isUpgradedCluster()); + blockWrites(); + assertThat(verifiedSettingValue(), Matchers.equalTo("true")); + } + } + + private static void blockWrites() throws IOException { + client().performRequest(new Request(HttpMethod.PUT.name(), "/" + INDEX_NAME + "/_block/write")); + + expectThrows( + ResponseException.class, + () -> client().performRequest( + newXContentRequest(HttpMethod.PUT, "/" + INDEX_NAME + "/_doc/test", (builder, params) -> builder.field("test", "test")) + ) + ); + } + + @SuppressWarnings("unchecked") + private static String verifiedSettingValue() throws IOException { + final var settingsRequest = new Request(HttpMethod.GET.name(), "/" + INDEX_NAME + "/_settings?flat_settings"); + final Map settingsResponse = entityAsMap(client().performRequest(settingsRequest)); + return (String) ((Map) ((Map) 
settingsResponse.get(INDEX_NAME)).get("settings")).get( + MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey() + ); + } +} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/inference.unified_inference.json b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.unified_inference.json new file mode 100644 index 0000000000000..84182d19f8825 --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/inference.unified_inference.json @@ -0,0 +1,45 @@ +{ + "inference.unified_inference": { + "documentation": { + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/unified-inference-api.html", + "description": "Perform inference using the Unified Schema" + }, + "stability": "stable", + "visibility": "public", + "headers": { + "accept": ["text/event-stream"], + "content_type": ["application/json"] + }, + "url": { + "paths": [ + { + "path": "/_inference/{inference_id}/_unified", + "methods": ["POST"], + "parts": { + "inference_id": { + "type": "string", + "description": "The inference Id" + } + } + }, + { + "path": "/_inference/{task_type}/{inference_id}/_unified", + "methods": ["POST"], + "parts": { + "task_type": { + "type": "string", + "description": "The task type" + }, + "inference_id": { + "type": "string", + "description": "The inference Id" + } + } + } + ] + }, + "body": { + "description": "The inference payload" + } + } +} diff --git a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml index 8e8afafc9f069..631cd8fec6465 100644 --- a/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml +++ b/rest-api-spec/src/yamlRestTest/resources/rest-api-spec/test/indices.put_mapping/10_basic.yml @@ -185,3 +185,92 @@ body: _source: mode: synthetic + +--- +"modify field type with subobjects:false": + - requires: + cluster_features: [ 
"mapper.subobjects_false_mapping_update_fix" ] + reason: requires fix for mapping updates when [subobjects:false] is set + - do: + indices.create: + index: test_index + body: + mappings: + subobjects: false + properties: + user.id: + type: long + user.name: + type: text + + - do: + catch: bad_request + indices.put_mapping: + index: test_index + body: + properties: + user.id: + type: keyword + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "mapper [user.id] cannot be changed from type [long] to [keyword]" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + user.name: + type: text + fields: + raw: + type: keyword + + - is_true: acknowledged + + +--- +"modify nested field type with subobjects:false": + - requires: + cluster_features: [ "mapper.subobjects_false_mapping_update_fix" ] + reason: requires fix for mapping updates when [subobjects:false] is set + - do: + indices.create: + index: test_index + body: + mappings: + properties: + path: + properties: + to: + subobjects: false + properties: + user.id: + type: long + user.name: + type: text + + - do: + catch: bad_request + indices.put_mapping: + index: test_index + body: + properties: + path.to.user.id: + type: keyword + + - match: { error.type: "illegal_argument_exception" } + - match: { error.reason: "mapper [path.to.user.id] cannot be changed from type [long] to [keyword]" } + + - do: + indices.put_mapping: + index: test_index + body: + properties: + path.to.user.name: + type: text + fields: + raw: + type: keyword + + - is_true: acknowledged diff --git a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java index bb4d579f6bed2..93f8997ff24a1 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/blocks/SimpleBlocksIT.java @@ -16,14 +16,21 @@ 
import org.elasticsearch.action.admin.indices.readonly.AddIndexBlockResponse; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.support.ActiveShardCount; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateTaskListener; +import org.elasticsearch.cluster.SimpleBatchedExecutor; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadata.APIBlock; +import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.cluster.routing.ShardRouting; +import org.elasticsearch.common.Priority; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.test.BackgroundIndexer; import org.elasticsearch.test.ESIntegTestCase; @@ -266,6 +273,74 @@ public void testAddIndexBlock() throws Exception { assertHitCount(prepareSearch(indexName).setSize(0), nbDocs); } + public void testReAddUnverifiedIndexBlock() { + final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + createIndex(indexName); + ensureGreen(indexName); + + final int nbDocs = randomIntBetween(0, 50); + indexRandom( + randomBoolean(), + false, + randomBoolean(), + IntStream.range(0, nbDocs).mapToObj(i -> prepareIndex(indexName).setId(String.valueOf(i)).setSource("num", i)).collect(toList()) + ); + + final APIBlock block = APIBlock.WRITE; + try { + AddIndexBlockResponse response = indicesAdmin().prepareAddBlock(block, indexName).get(); + assertTrue("Add block [" + block + "] to index [" + indexName + "] not acknowledged: " + response, 
response.isAcknowledged()); + assertIndexHasBlock(block, indexName); + + removeVerified(indexName); + + AddIndexBlockResponse response2 = indicesAdmin().prepareAddBlock(block, indexName).get(); + assertTrue("Add block [" + block + "] to index [" + indexName + "] not acknowledged: " + response, response2.isAcknowledged()); + assertIndexHasBlock(block, indexName); + } finally { + disableIndexBlock(indexName, block); + } + + } + + private static void removeVerified(String indexName) { + PlainActionFuture listener = new PlainActionFuture<>(); + internalCluster().clusterService(internalCluster().getMasterName()) + .createTaskQueue("test", Priority.NORMAL, new SimpleBatchedExecutor<>() { + @Override + public Tuple executeTask( + ClusterStateTaskListener clusterStateTaskListener, + ClusterState clusterState + ) { + + IndexMetadata indexMetadata = clusterState.metadata().index(indexName); + Settings.Builder settingsBuilder = Settings.builder().put(indexMetadata.getSettings()); + settingsBuilder.remove(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey()); + return Tuple.tuple( + ClusterState.builder(clusterState) + .metadata( + Metadata.builder(clusterState.metadata()) + .put( + IndexMetadata.builder(indexMetadata) + .settings(settingsBuilder) + .settingsVersion(indexMetadata.getSettingsVersion() + 1) + ) + ) + .build(), + null + ); + } + + @Override + public void taskSucceeded(ClusterStateTaskListener clusterStateTaskListener, Object ignored) { + listener.onResponse(null); + } + }) + .submitTask("test", e -> fail(e), null); + + listener.actionGet(); + } + public void testSameBlockTwice() throws Exception { final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); createIndex(indexName); @@ -452,6 +527,9 @@ static void assertIndexHasBlock(APIBlock block, final String... 
indices) { .count(), equalTo(1L) ); + if (block.getBlock().contains(ClusterBlockLevel.WRITE)) { + assertThat(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexSettings), is(true)); + } } } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 2a68b65bcdccb..73e9a2058b2b4 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -394,6 +394,7 @@ exports org.elasticsearch.action.downsample; exports org.elasticsearch.plugins.internal to + org.elasticsearch.inference, org.elasticsearch.metering, org.elasticsearch.stateless, org.elasticsearch.settings.secure, diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index e66cb31d6f9a8..148c37b5bd177 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -156,7 +156,10 @@ static TransportVersion def(int id) { public static final TransportVersion ELASTIC_INFERENCE_SERVICE_UNIFIED_CHAT_COMPLETIONS_INTEGRATION = def(8_822_00_0); public static final TransportVersion KQL_QUERY_TECH_PREVIEW = def(8_823_00_0); public static final TransportVersion ESQL_PROFILE_ROWS_PROCESSED = def(8_824_00_0); - public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES = def(8_825_00_0); + public static final TransportVersion BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_825_00_0); + public static final TransportVersion REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 = def(8_826_00_0); + public static final TransportVersion ESQL_SKIP_ES_INDEX_SERIALIZATION = def(8_827_00_0); + public static final TransportVersion ADD_INDEX_BLOCK_TWO_PHASE = def(8_828_00_0); /* * STOP! READ THIS FIRST! 
No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 8873c9b0e281e..a3e7863c9b094 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -192,8 +192,10 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_16_1 = new Version(8_16_01_99); public static final Version V_8_16_2 = new Version(8_16_02_99); public static final Version V_8_16_3 = new Version(8_16_03_99); + public static final Version V_8_16_4 = new Version(8_16_04_99); public static final Version V_8_17_0 = new Version(8_17_00_99); public static final Version V_8_17_1 = new Version(8_17_01_99); + public static final Version V_8_17_2 = new Version(8_17_02_99); public static final Version V_8_18_0 = new Version(8_18_00_99); public static final Version V_9_0_0 = new Version(9_00_00_99); public static final Version CURRENT = V_9_0_0; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java index 9bc088f944be0..d8e0d4a623ad4 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/AutoCreateAction.java @@ -268,9 +268,9 @@ ClusterState execute( final var dataStream = clusterState.metadata().dataStreams().get(request.index()); final var backingIndexName = dataStream.getIndices().get(0).getName(); - final var indexNames = dataStream.getFailureIndices().getIndices().isEmpty() + final var indexNames = dataStream.getFailureIndices().isEmpty() ? 
List.of(backingIndexName) - : List.of(backingIndexName, dataStream.getFailureIndices().getIndices().get(0).getName()); + : List.of(backingIndexName, dataStream.getFailureIndices().get(0).getName()); taskContext.success(getAckListener(indexNames, allocationActionMultiListener)); successfulRequests.put(request, indexNames); return clusterState; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockClusterStateUpdateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockClusterStateUpdateRequest.java index 50bd3b37b4cb3..f48c550d73efd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockClusterStateUpdateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockClusterStateUpdateRequest.java @@ -21,6 +21,7 @@ public record AddIndexBlockClusterStateUpdateRequest( TimeValue masterNodeTimeout, TimeValue ackTimeout, APIBlock block, + boolean markVerified, long taskId, Index[] indices ) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java index 5f9bd6399fe7d..20201bf7fe058 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/readonly/AddIndexBlockRequest.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.indices.readonly; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -32,12 +33,18 @@ public class AddIndexBlockRequest extends AcknowledgedRequest { private final ClusterBlock clusterBlock; + private final boolean phase1; ShardRequest(StreamInput in) throws 
IOException { super(in); clusterBlock = new ClusterBlock(in); + if (in.getTransportVersion().onOrAfter(TransportVersions.ADD_INDEX_BLOCK_TWO_PHASE)) { + phase1 = in.readBoolean(); + } else { + phase1 = true; // does not matter, not verified anyway + } } - public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, final TaskId parentTaskId) { + public ShardRequest(final ShardId shardId, final ClusterBlock clusterBlock, boolean phase1, final TaskId parentTaskId) { super(shardId); this.clusterBlock = Objects.requireNonNull(clusterBlock); + this.phase1 = phase1; setParentTask(parentTaskId); } @Override public String toString() { - return "verify shard " + shardId + " before block with " + clusterBlock; + return "verify shard " + shardId + " before block with " + clusterBlock + " phase1=" + phase1; } @Override public void writeTo(final StreamOutput out) throws IOException { super.writeTo(out); clusterBlock.writeTo(out); + if (out.getTransportVersion().onOrAfter(TransportVersions.ADD_INDEX_BLOCK_TWO_PHASE)) { + out.writeBoolean(phase1); + } } public ClusterBlock clusterBlock() { return clusterBlock; } + + public boolean phase1() { + return phase1; + } } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java index 4aa022aff1c80..0f1b77af0242e 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/resolve/ResolveIndexAction.java @@ -634,7 +634,7 @@ private static void enrichIndexAbstraction( ); } case ALIAS -> { - String[] indexNames = getAliasIndexStream(resolvedExpression, ia, indicesLookup).map(Index::getName) + String[] indexNames = getAliasIndexStream(resolvedExpression, ia, clusterState.metadata()).map(Index::getName) .toArray(String[]::new); Arrays.sort(indexNames); aliases.add(new 
ResolvedAlias(ia.getName(), indexNames)); @@ -644,11 +644,11 @@ private static void enrichIndexAbstraction( Stream dataStreamIndices = resolvedExpression.selector() == null ? dataStream.getIndices().stream() : switch (resolvedExpression.selector()) { - case DATA -> dataStream.getBackingIndices().getIndices().stream(); - case FAILURES -> dataStream.getFailureIndices().getIndices().stream(); + case DATA -> dataStream.getDataComponent().getIndices().stream(); + case FAILURES -> dataStream.getFailureIndices().stream(); case ALL_APPLICABLE -> Stream.concat( - dataStream.getBackingIndices().getIndices().stream(), - dataStream.getFailureIndices().getIndices().stream() + dataStream.getIndices().stream(), + dataStream.getFailureIndices().stream() ); }; String[] backingIndices = dataStreamIndices.map(Index::getName).toArray(String[]::new); @@ -659,11 +659,7 @@ private static void enrichIndexAbstraction( } } - private static Stream getAliasIndexStream( - ResolvedExpression resolvedExpression, - IndexAbstraction ia, - SortedMap indicesLookup - ) { + private static Stream getAliasIndexStream(ResolvedExpression resolvedExpression, IndexAbstraction ia, Metadata metadata) { Stream aliasIndices; if (resolvedExpression.selector() == null) { aliasIndices = ia.getIndices().stream(); @@ -672,30 +668,11 @@ private static Stream getAliasIndexStream( case DATA -> ia.getIndices().stream(); case FAILURES -> { assert ia.isDataStreamRelated() : "Illegal selector [failures] used on non data stream alias"; - yield ia.getIndices() - .stream() - .map(Index::getName) - .map(indicesLookup::get) - .map(IndexAbstraction::getParentDataStream) - .filter(Objects::nonNull) - .distinct() - .map(DataStream::getFailureIndices) - .flatMap(failureIndices -> failureIndices.getIndices().stream()); + yield ia.getFailureIndices(metadata).stream(); } case ALL_APPLICABLE -> { if (ia.isDataStreamRelated()) { - yield Stream.concat( - ia.getIndices().stream(), - ia.getIndices() - .stream() - .map(Index::getName) - 
.map(indicesLookup::get) - .map(IndexAbstraction::getParentDataStream) - .filter(Objects::nonNull) - .distinct() - .map(DataStream::getFailureIndices) - .flatMap(failureIndices -> failureIndices.getIndices().stream()) - ); + yield Stream.concat(ia.getIndices().stream(), ia.getFailureIndices(metadata).stream()); } else { yield ia.getIndices().stream(); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java index 4f0aa9c5bade4..ac47144c1f558 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/TransportRolloverAction.java @@ -246,7 +246,7 @@ protected void masterOperation( } // When we're initializing a failure store, we skip the stats request because there is no source index to retrieve stats for. - if (targetFailureStore && ((DataStream) rolloverTargetAbstraction).getFailureIndices().getIndices().isEmpty()) { + if (targetFailureStore && ((DataStream) rolloverTargetAbstraction).getFailureIndices().isEmpty()) { initializeFailureStore(rolloverRequest, listener, trialSourceIndexName, trialRolloverIndexName); return; } diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java index dd473869fb2d9..fcf303ff2c82e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkOperation.java @@ -654,7 +654,7 @@ private boolean addDocumentToRedirectRequests(BulkItemRequest request, Exception * If so, we'll need to roll it over before we index the failed documents into the failure store. 
*/ private void maybeMarkFailureStoreForRollover(DataStream dataStream) { - if (dataStream.getFailureIndices().isRolloverOnWrite() == false) { + if (dataStream.getFailureComponent().isRolloverOnWrite() == false) { return; } failureStoresToBeRolledOver.add(dataStream.getName()); diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java index 523381321ada7..b3e76e52fda26 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportBulkAction.java @@ -321,9 +321,9 @@ private void populateMissingTargets( // Determine which data streams and failure stores need to be rolled over. DataStream dataStream = state.metadata().dataStreams().get(request.index()); if (dataStream != null) { - if (writeToFailureStore == false && dataStream.getBackingIndices().isRolloverOnWrite()) { + if (writeToFailureStore == false && dataStream.getDataComponent().isRolloverOnWrite()) { dataStreamsToBeRolledOver.add(request.index()); - } else if (lazyRolloverFailureStoreFeature && writeToFailureStore && dataStream.getFailureIndices().isRolloverOnWrite()) { + } else if (lazyRolloverFailureStoreFeature && writeToFailureStore && dataStream.getFailureComponent().isRolloverOnWrite()) { failureStoresToBeRolledOver.add(request.index()); } } diff --git a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java index c55957787aee7..5dd60a1122bf7 100644 --- a/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java +++ b/server/src/main/java/org/elasticsearch/action/datastreams/GetDataStreamAction.java @@ -418,10 +418,10 @@ public XContentBuilder toXContent( builder.field(FAILURE_STORE_ENABLED.getPreferredName(), failureStoreEffectivelyEnabled); builder.field( 
DataStream.ROLLOVER_ON_WRITE_FIELD.getPreferredName(), - dataStream.getFailureIndices().isRolloverOnWrite() + dataStream.getFailureComponent().isRolloverOnWrite() ); - indicesToXContent(builder, dataStream.getFailureIndices().getIndices()); - addAutoShardingEvent(builder, params, dataStream.getFailureIndices().getAutoShardingEvent()); + indicesToXContent(builder, dataStream.getFailureIndices()); + addAutoShardingEvent(builder, params, dataStream.getFailureComponent().getAutoShardingEvent()); builder.endObject(); } builder.endObject(); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 4343451256920..bcb8a7fb78bf3 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -885,12 +885,12 @@ public Index getConcreteWriteIndex(IndexAbstraction ia, Metadata metadata) { // Resolve write index and get parent data stream to handle the case of dealing with an alias String defaultWriteIndexName = ia.getWriteIndex().getName(); DataStream dataStream = metadata.getIndicesLookup().get(defaultWriteIndexName).getParentDataStream(); - if (dataStream.getFailureIndices().getIndices().size() < 1) { + if (dataStream.getFailureIndices().size() < 1) { throw new ElasticsearchException( "Attempting to write a document to a failure store but the target data stream does not have one enabled" ); } - return dataStream.getFailureIndices().getIndices().get(dataStream.getFailureIndices().getIndices().size() - 1); + return dataStream.getFailureIndices().get(dataStream.getFailureIndices().size() - 1); } else { // Resolve as normal return ia.getWriteIndex(this, metadata); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index 73e6a0306247d..aeea0a5d65c8a 
100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -10,7 +10,6 @@ package org.elasticsearch.action.search; import org.apache.logging.log4j.Logger; -import org.apache.lucene.util.CollectionUtil; import org.apache.lucene.util.SetOnce; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ExceptionsHelper; @@ -26,6 +25,7 @@ import org.elasticsearch.cluster.routing.GroupShardsIterator; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -43,8 +43,7 @@ import org.elasticsearch.transport.Transport; import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; +import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @@ -98,7 +97,6 @@ abstract class AbstractSearchAsyncAction exten protected final GroupShardsIterator toSkipShardsIts; protected final GroupShardsIterator shardsIts; private final SearchShardIterator[] shardIterators; - private final Map shardIndexMap; private final int expectedTotalOps; private final AtomicInteger totalOps = new AtomicInteger(); private final int maxConcurrentRequestsPerNode; @@ -142,17 +140,11 @@ abstract class AbstractSearchAsyncAction exten this.toSkipShardsIts = new GroupShardsIterator<>(toSkipIterators); this.shardsIts = new GroupShardsIterator<>(iterators); - // we compute the shard index based on the natural order of the shards + this.shardIterators = iterators.toArray(new SearchShardIterator[0]); + // we later compute the shard index based on the natural order of the shards // that participate in the search request. 
This means that this number is // consistent between two requests that target the same shards. - Map shardMap = new HashMap<>(); - List searchIterators = new ArrayList<>(iterators); - CollectionUtil.timSort(searchIterators); - for (int i = 0; i < searchIterators.size(); i++) { - shardMap.put(searchIterators.get(i), i); - } - this.shardIndexMap = Collections.unmodifiableMap(shardMap); - this.shardIterators = searchIterators.toArray(SearchShardIterator[]::new); + Arrays.sort(shardIterators); // we need to add 1 for non active partition, since we count it in the total. This means for each shard in the iterator we sum up // it's number of active shards but use 1 as the default if no replica of a shard is active at this point. @@ -236,6 +228,10 @@ protected final void run() { assert iterator.skip(); skipShard(iterator); } + final Map shardIndexMap = Maps.newHashMapWithExpectedSize(shardIterators.length); + for (int i = 0; i < shardIterators.length; i++) { + shardIndexMap.put(shardIterators[i], i); + } if (shardsIts.size() > 0) { doCheckNoMissingShards(getName(), request, shardsIts); for (int i = 0; i < shardsIts.size(); i++) { diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index 119cfcab76105..22ad670c6204d 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -158,30 +158,31 @@ private void innerRunFetch(ScoreDoc[] scoreDocs, int numShards, SearchPhaseContr ); for (int i = 0; i < docIdsToLoad.length; i++) { List entry = docIdsToLoad[i]; - RankDocShardInfo rankDocs = rankDocsPerShard == null || rankDocsPerShard.get(i).isEmpty() - ? 
null - : new RankDocShardInfo(rankDocsPerShard.get(i)); - SearchPhaseResult shardPhaseResult = searchPhaseShardResults.get(i); if (entry == null) { // no results for this shard ID - if (shardPhaseResult != null) { - // if we got some hits from this shard we have to release the context there - // we do this as we go since it will free up resources and passing on the request on the - // transport layer is cheap. - releaseIrrelevantSearchContext(shardPhaseResult, context); - progressListener.notifyFetchResult(i); - } + // if we got some hits from this shard we have to release the context + // we do this below after sending out the fetch requests relevant to the search to give priority to those requests + // that contribute to the final search response // in any case we count down this result since we don't talk to this shard anymore counter.countDown(); } else { executeFetch( - shardPhaseResult, + searchPhaseShardResults.get(i), counter, entry, - rankDocs, + rankDocsPerShard == null || rankDocsPerShard.get(i).isEmpty() ? null : new RankDocShardInfo(rankDocsPerShard.get(i)), (lastEmittedDocPerShard != null) ? 
lastEmittedDocPerShard[i] : null ); } } + for (int i = 0; i < docIdsToLoad.length; i++) { + if (docIdsToLoad[i] == null) { + SearchPhaseResult shardPhaseResult = searchPhaseShardResults.get(i); + if (shardPhaseResult != null) { + releaseIrrelevantSearchContext(shardPhaseResult, context); + progressListener.notifyFetchResult(i); + } + } + } } private List> splitRankDocsPerShard(ScoreDoc[] scoreDocs, int numShards) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 286ecb3113337..1313060936f63 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -259,6 +259,15 @@ public List getIndices() { return backingIndices.indices; } + @Override + public List getFailureIndices(Metadata ignored) { + return failureIndices.indices; + } + + public List getFailureIndices() { + return failureIndices.indices; + } + public long getGeneration() { return generation; } @@ -268,11 +277,20 @@ public Index getWriteIndex() { return backingIndices.getWriteIndex(); } + /** + * @param metadata is not necessary for data streams + * @return the write failure index if the failure store is enabled and there is already at least one failure, null otherwise + */ + @Override + public Index getWriteFailureIndex(Metadata metadata) { + return getWriteFailureIndex(); + } + /** * @return the write failure index if the failure store is enabled and there is already at least one failure, null otherwise */ @Nullable - public Index getFailureStoreWriteIndex() { + public Index getWriteFailureIndex() { return failureIndices.indices.isEmpty() ? 
null : failureIndices.getWriteIndex(); } @@ -495,11 +513,11 @@ public DataStreamAutoShardingEvent getAutoShardingEvent() { return backingIndices.autoShardingEvent; } - public DataStreamIndices getBackingIndices() { + public DataStreamIndices getDataComponent() { return backingIndices; } - public DataStreamIndices getFailureIndices() { + public DataStreamIndices getFailureComponent() { return failureIndices; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java index 0585263c45e99..8429876f9f937 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexAbstraction.java @@ -38,6 +38,16 @@ public interface IndexAbstraction { */ List getIndices(); + /** + * It retrieves the failure indices of an index abstraction given it supports the failure store. + * @param metadata certain abstractions require the matadata to lazily retrieve the failure indices. + * @return All concrete failure indices this index abstraction is referring to. If the failure store is + * not supported, it returns an empty list. + */ + default List getFailureIndices(@Nullable Metadata metadata) { + return List.of(); + } + /** * A write index is a dedicated concrete index, that accepts all the new documents that belong to an index abstraction. *

@@ -50,6 +60,18 @@ public interface IndexAbstraction { @Nullable Index getWriteIndex(); + /** + * A write failure index is a dedicated concrete index, that accepts all the new documents that belong to the failure store of + * an index abstraction. Only an index abstraction with true {@link #isDataStreamRelated()} supports a failure store. + * @param metadata certain index abstraction require the metadata to lazily retrieve the failure indices + * @return the write failure index of this index abstraction or null if this index abstraction doesn't have + * a write failure index or it does not support the failure store. + */ + @Nullable + default Index getWriteFailureIndex(Metadata metadata) { + return null; + } + default Index getWriteIndex(IndexRequest request, Metadata metadata) { return getWriteIndex(); } @@ -199,9 +221,10 @@ class Alias implements IndexAbstraction { private final boolean isHidden; private final boolean isSystem; private final boolean dataStreamAlias; + private final List dataStreams; public Alias(AliasMetadata aliasMetadata, List indexMetadatas) { - // note: don't capture a reference to any of these indexMetadatas here + // note: don't capture a reference to any of these indexMetadata here this.aliasName = aliasMetadata.getAlias(); this.referenceIndices = new ArrayList<>(indexMetadatas.size()); boolean isSystem = true; @@ -226,15 +249,22 @@ public Alias(AliasMetadata aliasMetadata, List indexMetadatas) { this.isHidden = aliasMetadata.isHidden() == null ? 
false : aliasMetadata.isHidden(); this.isSystem = isSystem; dataStreamAlias = false; + dataStreams = List.of(); } - public Alias(DataStreamAlias dataStreamAlias, List indicesOfAllDataStreams, Index writeIndexOfWriteDataStream) { + public Alias( + DataStreamAlias dataStreamAlias, + List indicesOfAllDataStreams, + Index writeIndexOfWriteDataStream, + List dataStreams + ) { this.aliasName = dataStreamAlias.getName(); this.referenceIndices = indicesOfAllDataStreams; this.writeIndex = writeIndexOfWriteDataStream; this.isHidden = false; this.isSystem = false; this.dataStreamAlias = true; + this.dataStreams = dataStreams; } @Override @@ -251,11 +281,38 @@ public List getIndices() { return referenceIndices; } + @Override + public List getFailureIndices(Metadata metadata) { + if (isDataStreamRelated() == false) { + return List.of(); + } + assert metadata != null : "metadata must not be null to be able to retrieve the failure indices"; + List failureIndices = new ArrayList<>(); + for (String dataStreamName : dataStreams) { + DataStream dataStream = metadata.dataStreams().get(dataStreamName); + if (dataStream != null && dataStream.getFailureIndices().isEmpty() == false) { + failureIndices.addAll(dataStream.getFailureIndices()); + } + } + return failureIndices; + } + @Nullable public Index getWriteIndex() { return writeIndex; } + @Nullable + @Override + public Index getWriteFailureIndex(Metadata metadata) { + if (isDataStreamRelated() == false || writeIndex == null) { + return null; + } + assert metadata != null : "metadata must not be null to be able to retrieve the failure indices"; + DataStream dataStream = metadata.getIndicesLookup().get(writeIndex.getName()).getParentDataStream(); + return dataStream == null ? 
null : dataStream.getWriteFailureIndex(); + } + @Override public Index getWriteIndex(IndexRequest request, Metadata metadata) { if (dataStreamAlias == false) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java index 8b88f7609fa8c..9ad00b517d51c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexNameExpressionResolver.java @@ -491,27 +491,28 @@ Index[] concreteIndices(Context context, String... indexExpressions) { for (ResolvedExpression expression : expressions) { final IndexAbstraction indexAbstraction = indicesLookup.get(expression.resource()); assert indexAbstraction != null; - if (indexAbstraction.getType() == Type.ALIAS && context.isResolveToWriteIndex()) { - Index writeIndex = indexAbstraction.getWriteIndex(); - if (writeIndex == null) { - throw new IllegalArgumentException( - "no write index is defined for alias [" - + indexAbstraction.getName() - + "]." - + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" - + " indices without one being designated as a write index" - ); - } - if (indexAbstraction.isDataStreamRelated()) { - DataStream dataStream = indicesLookup.get(indexAbstraction.getWriteIndex().getName()).getParentDataStream(); - resolveWriteIndexForDataStreams(context, dataStream, concreteIndicesResult, expression.selector()); - } else { + if (context.isResolveToWriteIndex()) { + if (shouldIncludeRegularIndices(context.getOptions(), expression.selector())) { + Index writeIndex = indexAbstraction.getWriteIndex(); + if (writeIndex == null && indexAbstraction.getType() == Type.ALIAS) { + throw new IllegalArgumentException( + "no write index is defined for alias [" + + indexAbstraction.getName() + + "]." 
+ + " The write index may be explicitly disabled using is_write_index=false or the alias points to multiple" + + " indices without one being designated as a write index" + ); + } if (addIndex(writeIndex, null, context)) { concreteIndicesResult.add(writeIndex); } } - } else if (indexAbstraction.getType() == Type.DATA_STREAM && context.isResolveToWriteIndex()) { - resolveWriteIndexForDataStreams(context, (DataStream) indexAbstraction, concreteIndicesResult, expression.selector()); + if (shouldIncludeFailureIndices(context.getOptions(), expression.selector())) { + Index failureStoreWriteIndex = indexAbstraction.getWriteFailureIndex(context.state.metadata()); + if (failureStoreWriteIndex != null && addIndex(failureStoreWriteIndex, null, context)) { + concreteIndicesResult.add(failureStoreWriteIndex); + } + } } else { if (context.getOptions().allowAliasesToMultipleIndices() == false && resolvesToMoreThanOneIndex(indexAbstraction, context, expression)) { @@ -530,23 +531,17 @@ && resolvesToMoreThanOneIndex(indexAbstraction, context, expression)) { ); } - if (indexAbstraction.getType() == Type.DATA_STREAM) { - resolveIndicesForDataStream(context, (DataStream) indexAbstraction, concreteIndicesResult, expression.selector()); - } else if (indexAbstraction.getType() == Type.ALIAS - && indexAbstraction.isDataStreamRelated() - && shouldIncludeFailureIndices(context.getOptions(), expression.selector())) { - for (DataStream dataStream : getAliasDataStreams(indexAbstraction, indicesLookup)) { - resolveIndicesForDataStream(context, dataStream, concreteIndicesResult, expression.selector()); - } - } else { - List indices = indexAbstraction.getIndices(); - for (int i = 0, n = indices.size(); i < n; i++) { - Index index = indices.get(i); - if (shouldTrackConcreteIndex(context, index)) { - concreteIndicesResult.add(index); - } + if (indexAbstraction.isDataStreamRelated()) { + resolveIndicesForDataStreamRelatedAbstraction(context, indexAbstraction, concreteIndicesResult, 
expression.selector()); + } else { + List indices = indexAbstraction.getIndices(); + for (int i = 0, n = indices.size(); i < n; i++) { + Index index = indices.get(i); + if (shouldTrackConcreteIndex(context, index)) { + concreteIndicesResult.add(index); } } + } } } @@ -558,30 +553,14 @@ && shouldIncludeFailureIndices(context.getOptions(), expression.selector())) { return resultArray; } - private static Set getAliasDataStreams(IndexAbstraction indexAbstraction, Map indicesLookup) { - // Collect the data streams involved with the alias - assert indexAbstraction.getType().equals(Type.ALIAS) && indexAbstraction.isDataStreamRelated() - : "Non data stream alias [" + indexAbstraction.getName() + "]"; - Set aliasDataStreams = new HashSet<>(); - List indices = indexAbstraction.getIndices(); - for (int i = 0, n = indices.size(); i < n; i++) { - Index index = indices.get(i); - DataStream parentDataStream = indicesLookup.get(index.getName()).getParentDataStream(); - if (parentDataStream != null) { - aliasDataStreams.add(parentDataStream); - } - } - return aliasDataStreams; - } - - private static void resolveIndicesForDataStream( + private static void resolveIndicesForDataStreamRelatedAbstraction( Context context, - DataStream dataStream, + IndexAbstraction indexAbstraction, Set concreteIndicesResult, IndexComponentSelector selector ) { if (shouldIncludeRegularIndices(context.getOptions(), selector)) { - List indices = dataStream.getIndices(); + List indices = indexAbstraction.getIndices(); for (int i = 0, n = indices.size(); i < n; i++) { Index index = indices.get(i); if (shouldTrackConcreteIndex(context, index)) { @@ -590,7 +569,7 @@ private static void resolveIndicesForDataStream( } } if (shouldIncludeFailureIndices(context.getOptions(), selector)) { - List failureIndices = dataStream.getFailureIndices().getIndices(); + List failureIndices = indexAbstraction.getFailureIndices(context.state.metadata()); for (int i = 0, n = failureIndices.size(); i < n; i++) { Index index = 
failureIndices.get(i); if (shouldTrackConcreteIndex(context, index)) { @@ -613,7 +592,7 @@ private static void resolveWriteIndexForDataStreams( } } if (shouldIncludeFailureIndices(context.getOptions(), selector)) { - Index failureStoreWriteIndex = dataStream.getFailureStoreWriteIndex(); + Index failureStoreWriteIndex = dataStream.getWriteFailureIndex(); if (failureStoreWriteIndex != null && addIndex(failureStoreWriteIndex, null, context)) { concreteIndicesResult.add(failureStoreWriteIndex); } @@ -652,7 +631,7 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract count += parentDataStream.getIndices().size(); } if (shouldIncludeFailureIndices(context.getOptions(), expression.selector())) { - count += parentDataStream.getFailureIndices().getIndices().size(); + count += parentDataStream.getFailureIndices().size(); } if (count > 1) { // Early out if we already have more than one index accounted @@ -669,7 +648,7 @@ private static boolean resolvesToMoreThanOneIndex(IndexAbstraction indexAbstract count += dataStream.getIndices().size(); } if (shouldIncludeFailureIndices(context.getOptions(), expression.selector())) { - count += dataStream.getFailureIndices().getIndices().size(); + count += dataStream.getFailureIndices().size(); } return count > 1; } @@ -973,7 +952,7 @@ public String[] indexAliases( IndexAbstraction ia = state.metadata().getIndicesLookup().get(index); DataStream dataStream = ia.getParentDataStream(); if (dataStream != null) { - if (dataStream.getFailureIndices().containsIndex(index)) { + if (dataStream.getFailureComponent().containsIndex(index)) { // Alias filters are not applied against indices in an abstraction's failure component. // They do not match the mapping of the data stream nor are the documents mapped for searching. 
return null; @@ -1100,10 +1079,10 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab aliasIndices.addAll(indexAbstraction.getIndices()); } if (shouldIncludeFailureIndices(context.getOptions(), selector) && indexAbstraction.isDataStreamRelated()) { - Set dataStreams = getAliasDataStreams(indexAbstraction, context.state.metadata().getIndicesLookup()); - aliasIndices = aliasIndices == null ? new ArrayList<>(dataStreams.size()) : aliasIndices; - for (DataStream dataStream : dataStreams) { - aliasIndices.addAll(dataStream.getFailureIndices().getIndices()); + List failureIndices = indexAbstraction.getFailureIndices(context.state.metadata()); + if (failureIndices.isEmpty() == false) { + aliasIndices = aliasIndices == null ? new ArrayList<>(failureIndices.size()) : aliasIndices; + aliasIndices.addAll(failureIndices); } } aliasIndices = aliasIndices == null ? List.of() : aliasIndices; @@ -1150,8 +1129,8 @@ public Map> resolveSearchRouting(ClusterState state, @Nullab } } if (shouldIncludeFailureIndices(context.getOptions(), resolvedExpression.selector())) { - if (dataStream.getFailureIndices().getIndices() != null) { - for (Index failureIndex : dataStream.getFailureIndices().getIndices()) { + if (dataStream.getFailureIndices().isEmpty() == false) { + for (Index failureIndex : dataStream.getFailureIndices()) { String concreteIndex = failureIndex.getName(); routings = collectRoutings(routings, paramRouting, norouting, concreteIndex); } @@ -1730,22 +1709,10 @@ private static Set expandToOpenClosed( } } if (shouldIncludeFailureIndices(context.getOptions(), selector)) { - if (indexAbstraction.getType() == Type.ALIAS && indexAbstraction.isDataStreamRelated()) { - Set aliasDataStreams = getAliasDataStreams( - indexAbstraction, - context.state.metadata().getIndicesLookup() - ); - for (DataStream ds : aliasDataStreams) { - List failureIndices = ds.getFailureIndices().getIndices(); - for (int i = 0; i < failureIndices.size(); i++) { - Index index = failureIndices.get(i); 
- resources.add(new ResolvedExpression(index.getName(), IndexComponentSelector.DATA)); - } - } - } else if (indexAbstraction.getType() == Type.DATA_STREAM) { - DataStream dataStream = (DataStream) indexAbstraction; - for (int i = 0, n = dataStream.getFailureIndices().getIndices().size(); i < n; i++) { - Index index = dataStream.getFailureIndices().getIndices().get(i); + if (indexAbstraction.isDataStreamRelated()) { + List failureIndices = indexAbstraction.getFailureIndices(context.state.metadata()); + for (int i = 0, n = failureIndices.size(); i < n; i++) { + Index index = failureIndices.get(i); IndexMetadata indexMetadata = context.state.metadata().index(index); if (indexMetadata.getState() != excludeState) { resources.add(new ResolvedExpression(index.getName(), IndexComponentSelector.DATA)); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java index 6a4f24818003e..35e853cdd55a9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/Metadata.java @@ -2492,7 +2492,7 @@ private static boolean assertContainsIndexIfDataStream(DataStream parent, IndexM assert parent == null || parent.getIndices().stream().anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName())) || (DataStream.isFailureStoreFeatureFlagEnabled() - && parent.getFailureIndices() + && parent.getFailureComponent() .getIndices() .stream() .anyMatch(index -> indexMetadata.getIndex().getName().equals(index.getName()))) @@ -2518,7 +2518,7 @@ private static void collectDataStreams( indexToDataStreamLookup.put(i.getName(), dataStream); } if (DataStream.isFailureStoreFeatureFlagEnabled()) { - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { indexToDataStreamLookup.put(i.getName(), dataStream); } } @@ -2534,7 +2534,8 @@ private static 
IndexAbstraction.Alias makeDsAliasAbstraction(Map dataStreams.get(name).getIndices().stream()).toList(), - writeIndexOfWriteDataStream + writeIndexOfWriteDataStream, + alias.getDataStreams() ); } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java index 254646f8e71a9..d4be8b2ddf5b6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamService.java @@ -111,8 +111,8 @@ public ClusterState execute(ClusterState currentState) throws Exception { ClusterState clusterState = createDataStream(request, currentState, delegate.reroute(), false); DataStream createdDataStream = clusterState.metadata().dataStreams().get(request.name); firstBackingIndexRef.set(createdDataStream.getIndices().get(0).getName()); - if (createdDataStream.getFailureIndices().getIndices().isEmpty() == false) { - firstFailureStoreRef.set(createdDataStream.getFailureIndices().getIndices().get(0).getName()); + if (createdDataStream.getFailureIndices().isEmpty() == false) { + firstFailureStoreRef.set(createdDataStream.getFailureIndices().get(0).getName()); } return clusterState; } diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java index db3973c1a15a8..71be34db9626f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsService.java @@ -366,7 +366,7 @@ private static void removeBackingIndex( ) { boolean indexNotRemoved = true; DataStream dataStream = validateDataStream(metadata, dataStreamName); - List targetIndices = failureStore ? 
dataStream.getFailureIndices().getIndices() : dataStream.getIndices(); + List targetIndices = failureStore ? dataStream.getFailureIndices() : dataStream.getIndices(); for (Index backingIndex : targetIndices) { if (backingIndex.getName().equals(indexName)) { if (failureStore) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexService.java index 5d1a037d6bc3e..df9fc71dd1b76 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexService.java @@ -152,7 +152,7 @@ public static ClusterState deleteIndices(ClusterState currentState, Set i IndexMetadata im = meta.getIndexSafe(index); DataStream parent = meta.getIndicesLookup().get(im.getIndex().getName()).getParentDataStream(); if (parent != null) { - boolean isFailureStoreWriteIndex = im.getIndex().equals(parent.getFailureStoreWriteIndex()); + boolean isFailureStoreWriteIndex = im.getIndex().equals(parent.getWriteFailureIndex()); if (isFailureStoreWriteIndex || im.getIndex().equals(parent.getWriteIndex())) { throw new IllegalArgumentException( "index [" diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java index 21b07571a4359..6c0eb9272b288 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataIndexStateService.java @@ -13,6 +13,7 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRunnable; import 
org.elasticsearch.action.admin.indices.close.CloseIndexClusterStateUpdateRequest; @@ -387,11 +388,18 @@ private static Tuple> addIndexBlock( ) { final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); + final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Set indicesToAddBlock = new HashSet<>(); for (Index index : indices) { - metadata.getSafe(index); // to check if index exists + IndexMetadata indexMetadata = metadata.getSafe(index);// to check if index exists if (currentState.blocks().hasIndexBlock(index.getName(), block.block)) { - logger.debug("index {} already has block {}, ignoring", index, block.block); + if (block.block.contains(ClusterBlockLevel.WRITE) && isIndexWriteBlockVerified(indexMetadata)) { + logger.debug("index {} already has block {}, ignoring", index, block.block); + } else { + // remove the block, we'll add a uuid based block below instead, never leaving it unblocked. + blocks.removeIndexBlock(index.getName(), block.block); + indicesToAddBlock.add(index); + } } else { indicesToAddBlock.add(index); } @@ -401,7 +409,6 @@ private static Tuple> addIndexBlock( return Tuple.tuple(currentState, Map.of()); } - final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); final Map blockedIndices = new HashMap<>(); for (Index index : indicesToAddBlock) { @@ -409,7 +416,7 @@ private static Tuple> addIndexBlock( final Set clusterBlocks = currentState.blocks().indices().get(index.getName()); if (clusterBlocks != null) { for (ClusterBlock clusterBlock : clusterBlocks) { - if (clusterBlock.id() == block.block.id()) { + if (clusterBlock.id() == block.block.id() && clusterBlock.uuid() != null) { // Reuse the existing UUID-based block indexBlock = clusterBlock; break; @@ -442,6 +449,10 @@ private static Tuple> addIndexBlock( return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).metadata(metadata).build(), blockedIndices); } + private static boolean 
isIndexWriteBlockVerified(IndexMetadata indexMetadata) { + return VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()); + } + /** * Adds an index block based on the given request, and notifies the listener upon completion. * Adding blocks is done in three steps: @@ -450,7 +461,7 @@ private static Tuple> addIndexBlock( * - Second, shards are checked to have properly applied the UUID-based block. * (see {@link WaitForBlocksApplied}). * - Third, the temporary UUID-based block is turned into a full block - * (see {@link #finalizeBlock(ClusterState, Map, Map, APIBlock)}. + * (see {@link #finalizeBlock(ClusterState, Map, Map, APIBlock, boolean)}. * Using this three-step process ensures non-interference by other operations in case where * we notify successful completion here. */ @@ -511,7 +522,16 @@ public void taskSucceeded(AddBlocksTask task, Map blockedIn + "]-[" + blockedIndices.keySet().stream().map(Index::getName).collect(Collectors.joining(", ")) + "]", - new FinalizeBlocksTask(task.request, blockedIndices, verifyResults, delegate2), + new FinalizeBlocksTask( + task.request, + blockedIndices, + verifyResults, + task.request().markVerified() + && clusterService.state() + .getMinTransportVersion() + .onOrAfter(TransportVersions.ADD_INDEX_BLOCK_TWO_PHASE), + delegate2 + ), null ) ) @@ -539,7 +559,8 @@ public Tuple> executeTask(FinalizeBlocksTask clusterState, task.blockedIndices, task.verifyResults, - task.request.block() + task.request.block(), + task.markVerified() ); assert finalizeResult.v2().size() == task.verifyResults.size(); return finalizeResult; @@ -556,6 +577,7 @@ private record FinalizeBlocksTask( AddIndexBlockClusterStateUpdateRequest request, Map blockedIndices, Map verifyResults, + boolean markVerified, ActionListener listener ) implements ClusterStateTaskListener { @Override @@ -805,10 +827,21 @@ private void sendVerifyShardBlockRequest( final TransportVerifyShardIndexBlockAction.ShardRequest shardRequest = new 
TransportVerifyShardIndexBlockAction.ShardRequest( shardId, block, + true, parentTaskId ); shardRequest.timeout(request.ackTimeout()); - client.executeLocally(TransportVerifyShardIndexBlockAction.TYPE, shardRequest, listener); + client.executeLocally( + TransportVerifyShardIndexBlockAction.TYPE, + shardRequest, + listener.delegateFailure((delegate, replicationResponse) -> { + final var phase2 = new TransportVerifyShardIndexBlockAction.ShardRequest(shardId, block, false, parentTaskId); + if (request.ackTimeout() != null) { + phase2.timeout(request.ackTimeout()); + } + client.executeLocally(TransportVerifyShardIndexBlockAction.TYPE, phase2, delegate); + }) + ); } } @@ -959,15 +992,18 @@ private void onlyOpenIndices(final OpenIndexClusterStateUpdateRequest request, f * @param blockedIndices the indices and their temporary UUID-based blocks to convert * @param verifyResult the index-level results for adding the block * @param block the full block to convert to + * @param markVerified if the index should be marked verified in case of a write-level block. 
* @return the updated cluster state, as well as the (failed and successful) index-level results for adding the block */ private static Tuple> finalizeBlock( final ClusterState currentState, final Map blockedIndices, final Map verifyResult, - final APIBlock block + final APIBlock block, + final boolean markVerified ) { final ClusterBlocks.Builder blocks = ClusterBlocks.builder(currentState.blocks()); + final Metadata.Builder metadata = Metadata.builder(currentState.metadata()); final Set effectivelyBlockedIndices = new HashSet<>(); Map blockingResults = new HashMap<>(verifyResult); @@ -1015,12 +1051,25 @@ private static Tuple> finalizeBlock( logger.debug("add block {} to index {} succeeded", block.block, index); effectivelyBlockedIndices.add(index.getName()); + + if (block.getBlock().contains(ClusterBlockLevel.WRITE) && markVerified) { + final IndexMetadata indexMetadata = metadata.getSafe(index); + if (VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == false) { + final IndexMetadata.Builder updatedMetadata = IndexMetadata.builder(indexMetadata) + .settings(Settings.builder().put(indexMetadata.getSettings()).put(VERIFIED_READ_ONLY_SETTING.getKey(), true)) + .settingsVersion(indexMetadata.getSettingsVersion() + 1); + metadata.put(updatedMetadata); + } + } } catch (final IndexNotFoundException e) { logger.debug("index {} has been deleted since blocking it started, ignoring", index); } } logger.info("completed adding [index.blocks.{}] block to indices {}", block.name, effectivelyBlockedIndices); - return Tuple.tuple(ClusterState.builder(currentState).blocks(blocks).build(), List.copyOf(blockingResults.values())); + return Tuple.tuple( + ClusterState.builder(currentState).metadata(metadata).blocks(blocks).build(), + List.copyOf(blockingResults.values()) + ); } /** diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java 
index 4fcbd4165423b..e984768277d27 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/MetadataUpdateSettingsService.java @@ -19,6 +19,7 @@ import org.elasticsearch.cluster.ClusterStateAckListener; import org.elasticsearch.cluster.ClusterStateTaskListener; import org.elasticsearch.cluster.block.ClusterBlock; +import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.block.ClusterBlocks; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.IndexRoutingTable; @@ -329,7 +330,7 @@ ClusterState execute(ClusterState currentState) { final ClusterBlocks.Builder blocks = ClusterBlocks.builder().blocks(currentState.blocks()); boolean changedBlocks = false; for (IndexMetadata.APIBlock block : IndexMetadata.APIBlock.values()) { - changedBlocks |= maybeUpdateClusterBlock(actualIndices, blocks, block.block, block.setting, openSettings); + changedBlocks |= maybeUpdateClusterBlock(actualIndices, blocks, block.block, block.setting, openSettings, metadataBuilder); } changed |= changedBlocks; @@ -424,7 +425,8 @@ private static boolean maybeUpdateClusterBlock( ClusterBlocks.Builder blocks, ClusterBlock block, Setting setting, - Settings openSettings + Settings openSettings, + Metadata.Builder metadataBuilder ) { boolean changed = false; if (setting.exists(openSettings)) { @@ -439,6 +441,12 @@ private static boolean maybeUpdateClusterBlock( if (blocks.hasIndexBlock(index, block)) { blocks.removeIndexBlock(index, block); changed = true; + if (block.contains(ClusterBlockLevel.WRITE)) { + IndexMetadata indexMetadata = metadataBuilder.get(index); + Settings.Builder indexSettings = Settings.builder().put(indexMetadata.getSettings()); + indexSettings.remove(MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.getKey()); + metadataBuilder.put(IndexMetadata.builder(indexMetadata).settings(indexSettings)); + } 
} } } diff --git a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java index c0fe0bc32fb08..5f2b50705d625 100644 --- a/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java +++ b/server/src/main/java/org/elasticsearch/common/ReferenceDocs.java @@ -83,6 +83,7 @@ public enum ReferenceDocs { ALLOCATION_EXPLAIN_NO_COPIES, ALLOCATION_EXPLAIN_MAX_RETRY, SECURE_SETTINGS, + CLUSTER_SHARD_LIMIT, // this comment keeps the ';' on the next line so every entry above has a trailing ',' which makes the diff for adding new links cleaner ; diff --git a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java index b5a513777756f..35289352d7daf 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/IndexScopedSettings.java @@ -163,6 +163,7 @@ public final class IndexScopedSettings extends AbstractScopedSettings { MapperService.INDEX_MAPPING_DEPTH_LIMIT_SETTING, MapperService.INDEX_MAPPING_DIMENSION_FIELDS_LIMIT_SETTING, MapperService.INDEX_MAPPING_FIELD_NAME_LENGTH_LIMIT_SETTING, + MapperService.INDEX_MAPPER_DYNAMIC_SETTING, BitsetFilterCache.INDEX_LOAD_RANDOM_ACCESS_FILTERS_EAGERLY_SETTING, IndexModule.INDEX_STORE_TYPE_SETTING, IndexModule.INDEX_STORE_PRE_LOAD_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/settings/Setting.java b/server/src/main/java/org/elasticsearch/common/settings/Setting.java index 16c6844f46402..0c7cb8612d383 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/Setting.java +++ b/server/src/main/java/org/elasticsearch/common/settings/Setting.java @@ -151,7 +151,8 @@ public enum Property { * Indicates that this index-level setting was deprecated in {@link Version#V_7_17_0} and is * forbidden in indices created from {@link 
Version#V_8_0_0} onwards. */ - @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // introduce IndexSettingDeprecatedInV8AndRemovedInV9 to replace this constant + @UpdateForV9(owner = UpdateForV9.Owner.CORE_INFRA) // introduce IndexSettingDeprecatedInV8AndRemovedInV10 + // note we still need v7 settings in v9 because we support reading from N-2 indices now IndexSettingDeprecatedInV7AndRemovedInV8, /** diff --git a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java index 3a4f832d6adc1..23d76abdec2f2 100644 --- a/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java +++ b/server/src/main/java/org/elasticsearch/common/unit/ByteSizeValue.java @@ -10,6 +10,7 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -24,7 +25,8 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES; +import static org.elasticsearch.TransportVersions.BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; +import static org.elasticsearch.TransportVersions.REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; import static org.elasticsearch.common.unit.ByteSizeUnit.BYTES; import static org.elasticsearch.common.unit.ByteSizeUnit.GB; import static org.elasticsearch.common.unit.ByteSizeUnit.KB; @@ -111,7 +113,8 @@ static ByteSizeValue newByteSizeValue(long sizeInBytes, ByteSizeUnit desiredUnit public static ByteSizeValue readFrom(StreamInput in) throws IOException { long size = in.readZLong(); ByteSizeUnit unit = ByteSizeUnit.readFrom(in); - if (in.getTransportVersion().onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES)) { + TransportVersion tv = in.getTransportVersion(); + if 
(tv.onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1) && tv.before(REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1)) { return newByteSizeValue(size, unit); } else { return of(size, unit); @@ -120,7 +123,8 @@ public static ByteSizeValue readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES)) { + TransportVersion tv = out.getTransportVersion(); + if (tv.onOrAfter(BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1) && tv.before(REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1)) { out.writeZLong(sizeInBytes); } else { out.writeZLong(Math.divideExact(sizeInBytes, desiredUnit.toBytes(1))); diff --git a/server/src/main/java/org/elasticsearch/features/package-info.java b/server/src/main/java/org/elasticsearch/features/package-info.java new file mode 100644 index 0000000000000..94b17648814af --- /dev/null +++ b/server/src/main/java/org/elasticsearch/features/package-info.java @@ -0,0 +1,151 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +/** + * The features infrastructure in Elasticsearch is responsible for two things: + *

    + *
  1. + * Determining when all nodes in a cluster have been upgraded to support some new functionality. + * This is used to only utilise new behavior when all nodes in the cluster support it. + *
  2. + *
  3. + * Ensuring nodes only join a cluster if they support all features already present on that cluster. + * This is to ensure that once a cluster supports a feature, it then never drops support. + * Conversely, when a feature is defined, it can then never be removed (but see Assumed features below). + *
  4. + *
+ * + *

Functionality

+ * This functionality starts with {@link org.elasticsearch.features.NodeFeature}. This is a single id representing + * new or a change in functionality - exactly what functionality that feature represents is up to the developer. These are expected + * to be {@code public static final} variables on a relevant class. Each area of code then exposes their features + * through an implementation of {@link org.elasticsearch.features.FeatureSpecification#getFeatures}, registered as an SPI implementation. + *

+ * All the features exposed by a node are included in the {@link org.elasticsearch.cluster.coordination.JoinTask.NodeJoinTask} information + * processed by {@link org.elasticsearch.cluster.coordination.NodeJoinExecutor}, when a node attempts to join a cluster. This checks + * the joining node has all the features already present on the cluster, and then records the set of features against that node + * in cluster state (in the {@link org.elasticsearch.cluster.ClusterFeatures} object). + * The calculated effective cluster features are not persisted, only the per-node feature set. + *

+ * Informally, the features supported by a particular node are 'node features'; when all nodes in a cluster support a particular + * feature, that is then a 'cluster feature'. + *

+ * Node features can then be checked by code to determine if all nodes in the cluster support that particular feature. + * This is done using {@link org.elasticsearch.features.FeatureService#clusterHasFeature}. This is a fast operation - the first + * time this method is called on a particular cluster state, the cluster features for a cluster are calculated from all the + * node feature information, and cached in the {@link org.elasticsearch.cluster.ClusterFeatures} object. + * Henceforth, all cluster feature checks are fast hash set lookups, at least until the nodes or master changes. + * + *

Features test infrastructure

+ * Features can be specified as conditions in YAML tests, as well as checks and conditions in code-defined rolling upgrade tests + * (see the Elasticsearch development documentation for more information). + * These checks are performed by the {@code TestFeatureService} interface, and its standard implementation {@code ESRestTestFeatureService}. + * + *

Test features

+ * Sometimes, you want to define a feature for nodes, but the only checks you need to do are as part of a test. In this case, + * the feature doesn't need to be included in the production feature set, it only needs to be present for automated tests. + * So alongside {@link org.elasticsearch.features.FeatureSpecification#getFeatures}, there is + * {@link org.elasticsearch.features.FeatureSpecification#getTestFeatures}. This can be used to exposed node features, + * but only for automated tests. It is ignored in production uses. This is determined by the {@link org.elasticsearch.features.FeatureData} + * class, which uses a system property (set by the test infrastructure) to decide whether to include test features or not, + * when gathering all the registered {@code FeatureSpecification} instances. + *

+ * Test features can be removed at-will (with appropriate backports), + * as there is no long-term upgrade guarantees required for clusters in automated tests. + * + *

Synthetic version features

+ * Cluster functionality checks performed on code built from the {@code main} branch can only use features to check functionality, + * but we also have branch releases with a longer release cadence. Sometimes tests need to be conditional on older versions + * (where there isn't a feature already defined in the right place), determined some point after the release has been finalized. + * This is where synthetic version features comes in. These can be used in tests where it is sensible to use + * a release version number (eg 8.12.3). The presence of these features is determined solely by the minimum + * node version present in the test cluster; no actual cluster features are defined nor checked. + * This is done by {@code ESRestTestFeatureService}, matching on features of the form {@code gte_v8.12.3}. + * For more information on their use, see the Elasticsearch developer documentation. + * + *

Assumed features

+ * Once a feature is defined on a cluster, it can never be removed - this is to ensure that functionality that is available + * on a cluster then never stops being available. However, this can lead to the list of features in cluster state growing ever larger. + * It is possible to remove defined cluster features, but only on a compatibility boundary (normally a new major release). + * To see how this can be so, it may be helpful to start with the compatibility guarantees we provide: + *
    + *
  • + * The first version of a new major (eg v9.0) can only form a cluster with the highest minor + * of the previous major (eg v8.18). + *
  • + *
  • + * This means that any cluster feature that was added before 8.18.0 was cut will always be present + * on any cluster that has at least one v9 node in it (as we don't support mixed-version clusters of more than two versions) + *
  • + *
  • + * This means that the code checks for those features can be completely removed from the code in v9, + * and the new behavior used all the time. + *
  • + *
  • + * This means that the node features themselves are not required, as they are never checked in the v9 codebase. + *
  • + *
+ * So, starting up a fresh v9 cluster, it does not need to have any knowledge of features added before 8.18, as the cluster + * will always have the new functionality. + *

+ * So then how do we do a rolling upgrade from 8.18 to 9.0, if features have been removed? Normally, that would prevent a 9.0 + * node from joining an 8.18 cluster, as it will not have all the required features published. However, we can make use + * of the major version difference to allow the rolling upgrade to proceed. + *

+ * This is where the {@link org.elasticsearch.features.NodeFeature#assumedAfterNextCompatibilityBoundary()} field comes in. On 8.18, + * we can mark all the features that will be removed in 9.0 as assumed. This means that when the features infrastructure sees a + * 9.x node, it will deem that node to have all the assumed features, even if the 9.0 node doesn't actually have those features + * in its published set. It will allow 9.0 nodes to join the cluster missing assumed features, + * and it will say the cluster supports a particular assumed feature even if it is missing from any 9.0 nodes in the cluster. + *

+ * Essentially, 8.18 nodes (or any other version that can form a cluster with 8.x or 9.x nodes) can mediate + * between the 8.x and 9.x feature sets, using {@code assumedAfterNextCompatibilityBoundary} + * to mark features that have been removed from 9.x, and know that 9.x nodes still meet the requirements for those features. + * These assumed features need to be defined before 8.18 and 9.0 are released. + *

+ * To go into more detail what happens during a rolling upgrade: + *

    + *
  1. Start with a homogenous 8.18 cluster, with an 8.18 cluster feature set (including assumed features)
  2. + *
  3. + * The first 9.0 node joins the cluster. Even though it is missing the features marked as assumed in 8.18, + * the 8.18 master lets the 9.0 node join because all the missing features are marked as assumed, + * and it is of the next major version. + *
  4. + *
  5. + * At this point, any feature checks that happen on 8.18 nodes for assumed features pass, despite the 9.0 node + * not publishing those features, as the 9.0 node is assumed to meet the requirements for that feature. + * 9.0 nodes do not have those checks at all, and the corresponding code running on 9.0 uses the new behaviour without checking. + *
  6. + *
  7. More 8.18 nodes get swapped for 9.0 nodes
  8. + *
  9. + * At some point, the master will change from an 8.18 node to a 9.0 node. The 9.0 node does not have the assumed + * features at all, so the new cluster feature set as calculated by the 9.0 master will only contain the features + * that 9.0 knows about (the calculated feature set is not persisted anywhere). + * The cluster has effectively dropped all the 8.18 features assumed in 9.0, whilst maintaining all behaviour. + * The upgrade carries on. + *
  10. + *
  11. + * If an 8.18 node were to quit and re-join the cluster still as 8.18 at this point + * (and there are other 8.18 nodes not yet upgraded), it will be able to join the cluster despite the master being 9.0. + * The 8.18 node publishes all the assumed features that 9.0 does not have - but that doesn't matter, because nodes can join + * with more features than are present in the cluster as a whole. The additional features are not added + * to the cluster feature set because not all the nodes in the cluster have those features + * (as there is at least one 9.0 node in the cluster - itself). +*
  12. + *
  13. + * At some point, the last 8.18 node leaves the cluster, and the cluster is a homogenous 9.0 cluster + * with only the cluster features known about by 9.0. + *
  14. + *
+ * + * For any dynamic releases that occur from main, the cadence is much quicker - once a feature is present in a cluster, + * you then only need one completed release to mark a feature as assumed, and a subsequent release to remove it from the codebase + * and elide the corresponding check. + */ +package org.elasticsearch.features; diff --git a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java index 4dd94cfc046c9..4da63ea9868db 100644 --- a/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java +++ b/server/src/main/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorService.java @@ -12,9 +12,9 @@ import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.TriFunction; import org.elasticsearch.common.settings.Setting; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.Diagnosis; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthIndicatorImpact; @@ -54,7 +54,6 @@ public class ShardsCapacityHealthIndicatorService implements HealthIndicatorServ "The cluster is running low on room to add new shards. Adding data to new indices is at risk"; private static final String INDEX_CREATION_RISK = "The cluster is running low on room to add new shards. 
Adding data to new indices might soon fail."; - private static final String HELP_GUIDE = "https://ela.st/fix-shards-capacity"; private static final TriFunction, String, Diagnosis> SHARD_MAX_CAPACITY_REACHED_FN = ( id, setting, @@ -62,13 +61,11 @@ public class ShardsCapacityHealthIndicatorService implements HealthIndicatorServ new Diagnosis.Definition( NAME, id, - "Elasticsearch is about to reach the maximum number of shards it can host, based on your current settings.", - "Increase the value of [" - + setting.getKey() - + "] cluster setting or remove " + "Elasticsearch is about to reach the maximum number of shards it can host as set by [" + setting.getKey() + "].", + "Increase the number of nodes in your cluster or remove some " + indexType - + " indices to clear up resources.", - HELP_GUIDE + + " indices to reduce the number of shards in the cluster.", + ReferenceDocs.CLUSTER_SHARD_LIMIT.toString() ), null ); @@ -82,22 +79,20 @@ public class ShardsCapacityHealthIndicatorService implements HealthIndicatorServ new HealthIndicatorImpact(NAME, "creation_of_new_indices_at_risk", 2, INDEX_CREATION_RISK, List.of(ImpactArea.INGEST)) ); static final Diagnosis SHARDS_MAX_CAPACITY_REACHED_DATA_NODES = SHARD_MAX_CAPACITY_REACHED_FN.apply( - "increase_max_shards_per_node", + "decrease_shards_per_non_frozen_node", ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE, - "data" + "non-frozen" ); static final Diagnosis SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES = SHARD_MAX_CAPACITY_REACHED_FN.apply( - "increase_max_shards_per_node_frozen", + "decrease_shards_per_frozen_node", ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN, "frozen" ); private final ClusterService clusterService; - private final FeatureService featureService; - public ShardsCapacityHealthIndicatorService(ClusterService clusterService, FeatureService featureService) { + public ShardsCapacityHealthIndicatorService(ClusterService clusterService) { this.clusterService = clusterService; - 
this.featureService = featureService; } @Override diff --git a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java index 250cb81183899..f3d58fe4b051f 100644 --- a/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/CompositeIndexEventListener.java @@ -28,6 +28,7 @@ import java.util.Iterator; import java.util.List; import java.util.function.Consumer; +import java.util.function.Supplier; import static org.elasticsearch.core.Strings.format; @@ -349,4 +350,16 @@ public void afterFilesRestoredFromRepository(IndexShard indexShard) { } } } + + @Override + public void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier> onPermitAcquiredListenerSupplier) { + for (IndexEventListener listener : listeners) { + try { + listener.onAcquirePrimaryOperationPermit(indexShard, onPermitAcquiredListenerSupplier); + } catch (Exception e) { + logger.warn(() -> "[" + indexShard.shardId() + "] failed to invoke the listener on acquiring a primary permit", e); + throw e; + } + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java index 4ebcbbf8a068b..83259d70ae278 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/GeoPointFieldMapper.java @@ -8,15 +8,20 @@ */ package org.elasticsearch.index.mapper; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.ShapeField; import org.apache.lucene.document.StoredField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.geo.LatLonGeometry; +import 
org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.search.IndexOrDocValuesQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeoFormatterFactory; @@ -282,17 +287,24 @@ public FieldMapper.Builder getMergeBuilder() { @Override protected void index(DocumentParserContext context, GeoPoint geometry) throws IOException { - if (fieldType().isIndexed()) { - context.doc().add(new LatLonPoint(fieldType().name(), geometry.lat(), geometry.lon())); - } - if (fieldType().hasDocValues()) { + final boolean indexed = fieldType().isIndexed(); + final boolean hasDocValues = fieldType().hasDocValues(); + final boolean store = fieldType().isStored(); + if (indexed && hasDocValues) { + context.doc().add(new LatLonPointWithDocValues(fieldType().name(), geometry.lat(), geometry.lon())); + } else if (hasDocValues) { context.doc().add(new LatLonDocValuesField(fieldType().name(), geometry.lat(), geometry.lon())); - } else if (fieldType().isStored() || fieldType().isIndexed()) { - context.addToFieldNames(fieldType().name()); + } else if (indexed) { + context.doc().add(new LatLonPoint(fieldType().name(), geometry.lat(), geometry.lon())); } - if (fieldType().isStored()) { + if (store) { context.doc().add(new StoredField(fieldType().name(), geometry.toString())); } + if (hasDocValues == false && (indexed || store)) { + // When the field doesn't have doc values so that we can run exists queries, we also need to index the field name separately. 
+ context.addToFieldNames(fieldType().name()); + } + // TODO phase out geohash (which is currently used in the CompletionSuggester) // we only expose the geohash value and disallow advancing tokens, hence we can reuse the same parser throughout multiple sub-fields DocumentParserContext parserContext = context.switchParser(new GeoHashMultiFieldParser(context.parser(), geometry.geohash())); @@ -622,4 +634,60 @@ protected void writeValue(XContentBuilder b, long value) throws IOException { return super.syntheticSourceSupport(); } + + /** + * Utility class that allows adding index and doc values in one field + */ + public static class LatLonPointWithDocValues extends Field { + + public static final FieldType TYPE = new FieldType(); + + static { + TYPE.setDimensions(2, Integer.BYTES); + TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); + TYPE.freeze(); + } + + // holds the doc value value. + private final long docValue; + + public LatLonPointWithDocValues(String name, double latitude, double longitude) { + super(name, TYPE); + final byte[] bytes; + if (fieldsData == null) { + bytes = new byte[8]; + fieldsData = new BytesRef(bytes); + } else { + bytes = ((BytesRef) fieldsData).bytes; + } + + final int latitudeEncoded = GeoEncodingUtils.encodeLatitude(latitude); + final int longitudeEncoded = GeoEncodingUtils.encodeLongitude(longitude); + NumericUtils.intToSortableBytes(latitudeEncoded, bytes, 0); + NumericUtils.intToSortableBytes(longitudeEncoded, bytes, Integer.BYTES); + docValue = (((long) latitudeEncoded) << 32) | (longitudeEncoded & 0xFFFFFFFFL); + } + + @Override + public Number numericValue() { + return docValue; + } + + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append(getClass().getSimpleName()); + result.append(" <"); + result.append(name); + result.append(':'); + + byte[] bytes = ((BytesRef) fieldsData).bytes; + result.append(GeoEncodingUtils.decodeLatitude(bytes, 0)); + result.append(','); + 
result.append(GeoEncodingUtils.decodeLongitude(bytes, Integer.BYTES)); + + result.append('>'); + return result.toString(); + } + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java index 1097c1f0ea16b..57e6e837b34ae 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperFeatures.java @@ -32,6 +32,10 @@ public Set getFeatures() { "mapper.constant_keyword.synthetic_source_write_fix" ); + public static final NodeFeature COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT = new NodeFeature( + "mapper.counted_keyword.synthetic_source_native_support" + ); + public static final NodeFeature META_FETCH_FIELDS_ERROR_CODE_CHANGED = new NodeFeature("meta_fetch_fields_error_code_changed"); public static final NodeFeature SPARSE_VECTOR_STORE_SUPPORT = new NodeFeature("mapper.sparse_vector.store_support"); @@ -49,7 +53,9 @@ public Set getTestFeatures() { CONSTANT_KEYWORD_SYNTHETIC_SOURCE_WRITE_FIX, META_FETCH_FIELDS_ERROR_CODE_CHANGED, SPARSE_VECTOR_STORE_SUPPORT, - SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE + COUNTED_KEYWORD_SYNTHETIC_SOURCE_NATIVE_SUPPORT, + SourceFieldMapper.SYNTHETIC_RECOVERY_SOURCE, + ObjectMapper.SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX ); } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java index fb4f86c3cba98..b181225d80ddf 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/MapperService.java @@ -164,6 +164,17 @@ public boolean isAutoUpdate() { Property.Dynamic, Property.IndexScope ); + /** + * Legacy index setting, kept for 7.x BWC compatibility. This setting has no effect in 8.x. Do not use. 
+ */ + @Deprecated + public static final Setting INDEX_MAPPER_DYNAMIC_SETTING = Setting.boolSetting( + "index.mapper.dynamic", + true, + Property.Dynamic, + Property.IndexScope, + Property.IndexSettingDeprecatedInV7AndRemovedInV8 + ); private final IndexAnalyzers indexAnalyzers; private final MappingParser mappingParser; diff --git a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java index 86ce4fbb74837..abca8e057f3b8 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/ObjectMapper.java @@ -20,6 +20,7 @@ import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.core.Nullable; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.mapper.MapperService.MergeReason; @@ -48,6 +49,7 @@ public class ObjectMapper extends Mapper { private static final Logger logger = LogManager.getLogger(ObjectMapper.class); private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(ObjectMapper.class); public static final FeatureFlag SUB_OBJECTS_AUTO_FEATURE_FLAG = new FeatureFlag("sub_objects_auto"); + static final NodeFeature SUBOBJECTS_FALSE_MAPPING_UPDATE_FIX = new NodeFeature("mapper.subobjects_false_mapping_update_fix"); public static final String CONTENT_TYPE = "object"; static final String STORE_ARRAY_SOURCE_PARAM = "store_array_source"; @@ -659,11 +661,21 @@ private static Map buildMergedMappers( if (subobjects.isPresent() && subobjects.get() == Subobjects.DISABLED && mergeWithMapper instanceof ObjectMapper objectMapper) { - // An existing mapping that has set `subobjects: false` is merged with a mapping with sub-objects - 
objectMapper.asFlattenedFieldMappers(objectMergeContext.getMapperBuilderContext()) - .stream() - .filter(m -> objectMergeContext.decrementFieldBudgetIfPossible(m.getTotalFieldsCount())) - .forEach(m -> putMergedMapper(mergedMappers, m)); + // An existing mapping that has set `subobjects: false` is merged with a mapping with sub-objects. + List flattenedMappers = objectMapper.asFlattenedFieldMappers( + objectMergeContext.getMapperBuilderContext() + ); + for (FieldMapper flattenedMapper : flattenedMappers) { + if (objectMergeContext.decrementFieldBudgetIfPossible(flattenedMapper.getTotalFieldsCount())) { + var conflict = mergedMappers.get(flattenedMapper.leafName()); + if (objectMergeContext.getMapperBuilderContext().getMergeReason() == MergeReason.INDEX_TEMPLATE + || conflict == null) { + putMergedMapper(mergedMappers, flattenedMapper); + } else { + putMergedMapper(mergedMappers, conflict.merge(flattenedMapper, objectMergeContext)); + } + } + } } else if (objectMergeContext.decrementFieldBudgetIfPossible(mergeWithMapper.getTotalFieldsCount())) { putMergedMapper(mergedMappers, mergeWithMapper); } else if (mergeWithMapper instanceof ObjectMapper om) { diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java index 4e55a2e9599d5..e5104948cc426 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexEventListener.java @@ -17,6 +17,8 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; +import java.util.function.Supplier; + /** * An index event listener is the primary extension point for plugins and build-in services * to react / listen to per-index and per-shard events. 
These listeners are registered per-index @@ -190,4 +192,14 @@ default void afterIndexShardRecovery(IndexShard indexShard, ActionListener * @param indexShard the shard that is recovering */ default void afterFilesRestoredFromRepository(IndexShard indexShard) {} + + /** + * Called when a single primary permit is acquired for the given shard (see + * {@link IndexShard#acquirePrimaryOperationPermit(ActionListener, java.util.concurrent.Executor)}). + * + * @param indexShard the shard of which a primary permit is requested + * @param onPermitAcquiredListenerSupplier call this immediately to get a listener when the permit is acquired. The listener must be + * completed in order for the permit to be given to the acquiring operation. + */ + default void onAcquirePrimaryOperationPermit(IndexShard indexShard, Supplier> onPermitAcquiredListenerSupplier) {} } diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index f52ea41d811c0..ab1c936d1c469 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.admin.indices.flush.FlushRequest; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.replication.PendingReplicationActions; import org.elasticsearch.action.support.replication.ReplicationResponse; @@ -189,7 +190,6 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.index.seqno.RetentionLeaseActions.RETAIN_ALL; import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; -import static 
org.elasticsearch.index.shard.IndexShard.PrimaryPermitCheck.CHECK_PRIMARY_MODE; public class IndexShard extends AbstractIndexShardComponent implements IndicesClusterStateService.Shard { @@ -779,28 +779,10 @@ public void relocated( final String targetAllocationId, final BiConsumer> consumer, final ActionListener listener - ) throws IllegalIndexShardStateException, IllegalStateException { - relocated(targetNodeId, targetAllocationId, consumer, listener, null); - } - - /** - * Provides an variant of {@link IndexShard#relocated(String, String, BiConsumer, ActionListener, Releasable)} with an option - * to relocate the shard under externally acquired primary permits. - * - * @param acquiredPrimaryPermits if null, waits until all the primary permits are acquired, otherwise it calls the consumer immediately - */ - public void relocated( - final String targetNodeId, - final String targetAllocationId, - final BiConsumer> consumer, - final ActionListener listener, - @Nullable final Releasable acquiredPrimaryPermits ) throws IllegalIndexShardStateException, IllegalStateException { assert shardRouting.primary() : "only primaries can be marked as relocated: " + shardRouting; - assert acquiredPrimaryPermits == null || indexShardOperationPermits.getActiveOperationsCount() == OPERATIONS_BLOCKED - : "external primary permits are provided but not held by the shard"; try (Releasable forceRefreshes = refreshListeners.forceRefreshes()) { - ActionListener onAcquired = new ActionListener<>() { + indexShardOperationPermits.blockOperations(new ActionListener<>() { @Override public void onResponse(Releasable releasable) { boolean success = false; @@ -878,13 +860,8 @@ public void onFailure(Exception e) { listener.onFailure(e); } } - }; - if (acquiredPrimaryPermits == null) { - // Wait on current thread because this execution is wrapped by CancellableThreads and we want to be able to interrupt it - indexShardOperationPermits.blockOperations(onAcquired, 30L, TimeUnit.MINUTES, 
EsExecutors.DIRECT_EXECUTOR_SERVICE); - } else { - ActionListener.completeWith(onAcquired, () -> acquiredPrimaryPermits); - } + }, 30L, TimeUnit.MINUTES, EsExecutors.DIRECT_EXECUTOR_SERVICE); // Wait on current thread because this execution is wrapped by + // CancellableThreads and we want to be able to interrupt it } } @@ -3592,48 +3569,35 @@ private EngineConfig newEngineConfig(LongSupplier globalCheckpointSupplier) { ); } - /** - * Check to run before running the primary permit operation - */ - public enum PrimaryPermitCheck { - CHECK_PRIMARY_MODE, - /** - * IMPORTANT: Currently intented to be used only for acquiring primary permits during the recovery of hollow shards. - * Don't disable primary mode checks unless you're really sure. - */ - NONE - } - /** * Acquire a primary operation permit whenever the shard is ready for indexing. If a permit is directly available, the provided * ActionListener will be called on the calling thread. During relocation hand-off, permit acquisition can be delayed. The provided * ActionListener will then be called using the provided executor. 
*/ public void acquirePrimaryOperationPermit(ActionListener onPermitAcquired, Executor executorOnDelay) { - acquirePrimaryOperationPermit(onPermitAcquired, executorOnDelay, false, CHECK_PRIMARY_MODE); + acquirePrimaryOperationPermit(onPermitAcquired, executorOnDelay, false); } public void acquirePrimaryOperationPermit( ActionListener onPermitAcquired, Executor executorOnDelay, boolean forceExecution - ) { - acquirePrimaryOperationPermit(onPermitAcquired, executorOnDelay, forceExecution, CHECK_PRIMARY_MODE); - } - - public void acquirePrimaryOperationPermit( - ActionListener onPermitAcquired, - Executor executorOnDelay, - boolean forceExecution, - PrimaryPermitCheck primaryPermitCheck ) { verifyNotClosed(); assert shardRouting.primary() : "acquirePrimaryOperationPermit should only be called on primary shard: " + shardRouting; - indexShardOperationPermits.acquire( - wrapPrimaryOperationPermitListener(primaryPermitCheck, onPermitAcquired), - executorOnDelay, - forceExecution - ); + + ActionListener onPermitAcquiredWrapped = onPermitAcquired.delegateFailureAndWrap((delegate, releasable) -> { + final ActionListener wrappedListener = indexShardOperationPermits.wrapContextPreservingActionListener( + delegate, + executorOnDelay, + forceExecution + ); + try (var listeners = new RefCountingListener(wrappedListener.map(unused -> releasable))) { + indexEventListener.onAcquirePrimaryOperationPermit(this, () -> listeners.acquire()); + } + }); + + indexShardOperationPermits.acquire(wrapPrimaryOperationPermitListener(onPermitAcquiredWrapped), executorOnDelay, forceExecution); } public boolean isPrimaryMode() { @@ -3641,51 +3605,33 @@ public boolean isPrimaryMode() { return replicationTracker.isPrimaryMode(); } - public void acquireAllPrimaryOperationsPermits(final ActionListener onPermitAcquired, final TimeValue timeout) { - acquireAllPrimaryOperationsPermits(onPermitAcquired, timeout, CHECK_PRIMARY_MODE); - } - /** * Acquire all primary operation permits. 
Once all permits are acquired, the provided ActionListener is called. * It is the responsibility of the caller to close the {@link Releasable}. */ - public void acquireAllPrimaryOperationsPermits( - final ActionListener onPermitAcquired, - final TimeValue timeout, - final PrimaryPermitCheck primaryPermitCheck - ) { + public void acquireAllPrimaryOperationsPermits(final ActionListener onPermitAcquired, final TimeValue timeout) { verifyNotClosed(); assert shardRouting.primary() : "acquireAllPrimaryOperationsPermits should only be called on primary shard: " + shardRouting; - asyncBlockOperations( - wrapPrimaryOperationPermitListener(primaryPermitCheck, onPermitAcquired), - timeout.duration(), - timeout.timeUnit() - ); + asyncBlockOperations(wrapPrimaryOperationPermitListener(onPermitAcquired), timeout.duration(), timeout.timeUnit()); } /** - * Wraps the action to run on a primary after acquiring permit. + * Wraps the action to run on a primary after acquiring permit. This wrapping is used to check if the shard is in primary mode before + * executing the action. 
* - * @param primaryPermitCheck check to run before the primary mode operation * @param listener the listener to wrap * @return the wrapped listener */ - private ActionListener wrapPrimaryOperationPermitListener( - final PrimaryPermitCheck primaryPermitCheck, - final ActionListener listener - ) { - return switch (primaryPermitCheck) { - case CHECK_PRIMARY_MODE -> listener.delegateFailure((l, r) -> { - if (isPrimaryMode()) { - l.onResponse(r); - } else { - r.close(); - l.onFailure(new ShardNotInPrimaryModeException(shardId, state)); - } - }); - case NONE -> listener; - }; + private ActionListener wrapPrimaryOperationPermitListener(final ActionListener listener) { + return listener.delegateFailure((l, r) -> { + if (isPrimaryMode()) { + l.onResponse(r); + } else { + r.close(); + l.onFailure(new ShardNotInPrimaryModeException(shardId, state)); + } + }); } private void asyncBlockOperations(ActionListener onPermitAcquired, long timeout, TimeUnit timeUnit) { @@ -3723,7 +3669,7 @@ public void runUnderPrimaryPermit(final Runnable runnable, final Consumer void bumpPrimaryTerm( diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java index 94ac4f4aca096..79f5d054df30d 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShardOperationPermits.java @@ -216,32 +216,7 @@ private void innerAcquire( try { synchronized (this) { if (queuedBlockOperations > 0) { - final Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(false); - final ActionListener wrappedListener; - if (executorOnDelay != null) { - wrappedListener = new ContextPreservingActionListener<>(contextSupplier, onAcquired).delegateFailure( - (l, r) -> executorOnDelay.execute(new ActionRunnable<>(l) { - @Override - public boolean isForceExecution() { - return 
forceExecution; - } - - @Override - protected void doRun() { - listener.onResponse(r); - } - - @Override - public void onRejection(Exception e) { - IOUtils.closeWhileHandlingException(r); - super.onRejection(e); - } - }) - ); - } else { - wrappedListener = new ContextPreservingActionListener<>(contextSupplier, onAcquired); - } - delayedOperations.add(wrappedListener); + delayedOperations.add(wrapContextPreservingActionListener(onAcquired, executorOnDelay, forceExecution)); return; } else { releasable = acquire(); @@ -255,6 +230,39 @@ public void onRejection(Exception e) { onAcquired.onResponse(releasable); } + public ActionListener wrapContextPreservingActionListener( + ActionListener listener, + @Nullable final Executor executorOnDelay, + final boolean forceExecution + ) { + final Supplier contextSupplier = threadPool.getThreadContext().newRestorableContext(false); + final ActionListener wrappedListener; + if (executorOnDelay != null) { + wrappedListener = new ContextPreservingActionListener<>(contextSupplier, listener).delegateFailure( + (l, r) -> executorOnDelay.execute(new ActionRunnable<>(l) { + @Override + public boolean isForceExecution() { + return forceExecution; + } + + @Override + protected void doRun() { + listener.onResponse(r); + } + + @Override + public void onRejection(Exception e) { + IOUtils.closeWhileHandlingException(r); + super.onRejection(e); + } + }) + ); + } else { + wrappedListener = new ContextPreservingActionListener<>(contextSupplier, listener); + } + return wrappedListener; + } + private Releasable acquire() throws InterruptedException { assert Thread.holdsLock(this); if (semaphore.tryAcquire(1, 0, TimeUnit.SECONDS)) { // the un-timed tryAcquire methods do not honor the fairness setting diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java index d88bbfa3eba17..66e8f98f77fef 100644 --- 
a/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesServiceBuilder.java @@ -34,7 +34,7 @@ import org.elasticsearch.plugins.EnginePlugin; import org.elasticsearch.plugins.IndexStorePlugin; import org.elasticsearch.plugins.PluginsService; -import org.elasticsearch.plugins.SearchPlugin; +import org.elasticsearch.plugins.internal.InternalSearchPlugin; import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry; @@ -266,8 +266,8 @@ public IndicesService build() { .flatMap(m -> m.entrySet().stream()) .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)); - var queryRewriteInterceptors = pluginsService.filterPlugins(SearchPlugin.class) - .map(SearchPlugin::getQueryRewriteInterceptors) + var queryRewriteInterceptors = pluginsService.filterPlugins(InternalSearchPlugin.class) + .map(InternalSearchPlugin::getQueryRewriteInterceptors) .flatMap(List::stream) .collect(Collectors.toMap(QueryRewriteInterceptor::getQueryName, interceptor -> { if (interceptor.getQueryName() == null) { diff --git a/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java b/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java index 41cf339c751d1..5004186d03848 100644 --- a/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java +++ b/server/src/main/java/org/elasticsearch/inference/InferenceServiceConfiguration.java @@ -191,7 +191,7 @@ public Builder setName(String name) { } public Builder setTaskTypes(EnumSet taskTypes) { - this.taskTypes = taskTypes; + this.taskTypes = TaskType.copyOf(taskTypes); return this; } diff --git a/server/src/main/java/org/elasticsearch/inference/SettingsConfiguration.java b/server/src/main/java/org/elasticsearch/inference/SettingsConfiguration.java index 
188b8a7e82b57..a19b6735536ef 100644 --- a/server/src/main/java/org/elasticsearch/inference/SettingsConfiguration.java +++ b/server/src/main/java/org/elasticsearch/inference/SettingsConfiguration.java @@ -28,10 +28,13 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; +import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; +import java.util.Set; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; @@ -50,6 +53,7 @@ public class SettingsConfiguration implements Writeable, ToXContentObject { private final boolean sensitive; private final boolean updatable; private final SettingsConfigurationFieldType type; + private final EnumSet supportedTaskTypes; /** * Constructs a new {@link SettingsConfiguration} instance with specified properties. @@ -61,6 +65,7 @@ public class SettingsConfiguration implements Writeable, ToXContentObject { * @param sensitive A boolean indicating whether the configuration contains sensitive information. * @param updatable A boolean indicating whether the configuration can be updated. * @param type The type of the configuration field, defined by {@link SettingsConfigurationFieldType}. + * @param supportedTaskTypes The task types that support this field. 
*/ private SettingsConfiguration( Object defaultValue, @@ -69,7 +74,8 @@ private SettingsConfiguration( boolean required, boolean sensitive, boolean updatable, - SettingsConfigurationFieldType type + SettingsConfigurationFieldType type, + EnumSet supportedTaskTypes ) { this.defaultValue = defaultValue; this.description = description; @@ -78,6 +84,7 @@ private SettingsConfiguration( this.sensitive = sensitive; this.updatable = updatable; this.type = type; + this.supportedTaskTypes = supportedTaskTypes; } public SettingsConfiguration(StreamInput in) throws IOException { @@ -88,6 +95,7 @@ public SettingsConfiguration(StreamInput in) throws IOException { this.sensitive = in.readBoolean(); this.updatable = in.readBoolean(); this.type = in.readEnum(SettingsConfigurationFieldType.class); + this.supportedTaskTypes = in.readEnumSet(TaskType.class); } static final ParseField DEFAULT_VALUE_FIELD = new ParseField("default_value"); @@ -97,6 +105,7 @@ public SettingsConfiguration(StreamInput in) throws IOException { static final ParseField SENSITIVE_FIELD = new ParseField("sensitive"); static final ParseField UPDATABLE_FIELD = new ParseField("updatable"); static final ParseField TYPE_FIELD = new ParseField("type"); + static final ParseField SUPPORTED_TASK_TYPES = new ParseField("supported_task_types"); @SuppressWarnings("unchecked") private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( @@ -104,7 +113,15 @@ public SettingsConfiguration(StreamInput in) throws IOException { true, args -> { int i = 0; - return new SettingsConfiguration.Builder().setDefaultValue(args[i++]) + + EnumSet supportedTaskTypes = EnumSet.noneOf(TaskType.class); + var supportedTaskTypesListOfStrings = (List) args[i++]; + + for (var supportedTaskTypeString : supportedTaskTypesListOfStrings) { + supportedTaskTypes.add(TaskType.fromString(supportedTaskTypeString)); + } + + return new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(args[i++]) 
.setDescription((String) args[i++]) .setLabel((String) args[i++]) .setRequired((Boolean) args[i++]) @@ -116,6 +133,7 @@ public SettingsConfiguration(StreamInput in) throws IOException { ); static { + PARSER.declareStringArray(constructorArg(), SUPPORTED_TASK_TYPES); PARSER.declareField(optionalConstructorArg(), (p, c) -> { if (p.currentToken() == XContentParser.Token.VALUE_STRING) { return p.text(); @@ -169,28 +187,8 @@ public SettingsConfigurationFieldType getType() { return type; } - /** - * Parses a configuration value from a parser context. - * This method can parse strings, numbers, booleans, objects, and null values, matching the types commonly - * supported in {@link SettingsConfiguration}. - * - * @param p the {@link org.elasticsearch.xcontent.XContentParser} instance from which to parse the configuration value. - */ - public static Object parseConfigurationValue(XContentParser p) throws IOException { - - if (p.currentToken() == XContentParser.Token.VALUE_STRING) { - return p.text(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NUMBER) { - return p.numberValue(); - } else if (p.currentToken() == XContentParser.Token.VALUE_BOOLEAN) { - return p.booleanValue(); - } else if (p.currentToken() == XContentParser.Token.START_OBJECT) { - // Crawler expects the value to be an object - return p.map(); - } else if (p.currentToken() == XContentParser.Token.VALUE_NULL) { - return null; - } - throw new XContentParseException("Unsupported token [" + p.currentToken() + "]"); + public Set getSupportedTaskTypes() { + return supportedTaskTypes; } @Override @@ -211,6 +209,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (type != null) { builder.field(TYPE_FIELD.getPreferredName(), type.toString()); } + builder.field(SUPPORTED_TASK_TYPES.getPreferredName(), supportedTaskTypes); } builder.endObject(); return builder; @@ -237,6 +236,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(sensitive); 
out.writeBoolean(updatable); out.writeEnum(type); + out.writeEnumSet(supportedTaskTypes); } public Map toMap() { @@ -253,6 +253,7 @@ public Map toMap() { Optional.ofNullable(type).ifPresent(t -> map.put(TYPE_FIELD.getPreferredName(), t.toString())); + map.put(SUPPORTED_TASK_TYPES.getPreferredName(), supportedTaskTypes); return map; } @@ -267,12 +268,13 @@ public boolean equals(Object o) { && Objects.equals(defaultValue, that.defaultValue) && Objects.equals(description, that.description) && Objects.equals(label, that.label) - && type == that.type; + && type == that.type + && Objects.equals(supportedTaskTypes, that.supportedTaskTypes); } @Override public int hashCode() { - return Objects.hash(defaultValue, description, label, required, sensitive, updatable, type); + return Objects.hash(defaultValue, description, label, required, sensitive, updatable, type, supportedTaskTypes); } public static class Builder { @@ -284,6 +286,11 @@ public static class Builder { private boolean sensitive; private boolean updatable; private SettingsConfigurationFieldType type; + private final EnumSet supportedTaskTypes; + + public Builder(EnumSet supportedTaskTypes) { + this.supportedTaskTypes = TaskType.copyOf(Objects.requireNonNull(supportedTaskTypes)); + } public Builder setDefaultValue(Object defaultValue) { this.defaultValue = defaultValue; @@ -321,7 +328,7 @@ public Builder setType(SettingsConfigurationFieldType type) { } public SettingsConfiguration build() { - return new SettingsConfiguration(defaultValue, description, label, required, sensitive, updatable, type); + return new SettingsConfiguration(defaultValue, description, label, required, sensitive, updatable, type, supportedTaskTypes); } } } diff --git a/server/src/main/java/org/elasticsearch/inference/TaskType.java b/server/src/main/java/org/elasticsearch/inference/TaskType.java index 17e77be43bd1a..73a0e3cc8a774 100644 --- a/server/src/main/java/org/elasticsearch/inference/TaskType.java +++ 
b/server/src/main/java/org/elasticsearch/inference/TaskType.java @@ -16,6 +16,7 @@ import org.elasticsearch.rest.RestStatus; import java.io.IOException; +import java.util.EnumSet; import java.util.Locale; import java.util.Objects; @@ -78,4 +79,14 @@ public void writeTo(StreamOutput out) throws IOException { public static String unsupportedTaskTypeErrorMsg(TaskType taskType, String serviceName) { return "The [" + serviceName + "] service does not support task type [" + taskType + "]"; } + + /** + * Copies a {@link EnumSet} if non-empty, otherwise returns an empty {@link EnumSet}. This is essentially the same + * as {@link EnumSet#copyOf(EnumSet)}, except it does not throw for an empty set. + * @param taskTypes task types to copy + * @return a copy of the passed in {@link EnumSet} + */ + public static EnumSet copyOf(EnumSet taskTypes) { + return taskTypes.isEmpty() ? EnumSet.noneOf(TaskType.class) : EnumSet.copyOf(taskTypes); + } } diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestService.java b/server/src/main/java/org/elasticsearch/ingest/IngestService.java index 1494d2a46f9d0..86522742a66c0 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestService.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestService.java @@ -293,7 +293,7 @@ static boolean isRolloverOnWrite(Metadata metadata, IndexRequest indexRequest) { if (dataStream == null) { return false; } - return dataStream.getBackingIndices().isRolloverOnWrite(); + return dataStream.getDataComponent().isRolloverOnWrite(); } /** diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 4693b4fcf718a..61ac8bbbfc69a 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -1352,7 +1352,7 @@ private Module loadDiagnosticServices( new 
StableMasterHealthIndicatorService(coordinationDiagnosticsService, clusterService), new RepositoryIntegrityHealthIndicatorService(clusterService), new DiskHealthIndicatorService(clusterService, featureService), - new ShardsCapacityHealthIndicatorService(clusterService, featureService), + new ShardsCapacityHealthIndicatorService(clusterService), fileSettingsHealthIndicatorService ); var pluginHealthIndicatorServices = pluginsService.filterPlugins(HealthPlugin.class) diff --git a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java index e87e9ee85b29c..f5670ebd8a543 100644 --- a/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/SearchPlugin.java @@ -23,7 +23,6 @@ import org.elasticsearch.index.query.QueryParser; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionParser; -import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.search.SearchExtBuilder; import org.elasticsearch.search.aggregations.Aggregation; import org.elasticsearch.search.aggregations.AggregationBuilder; @@ -129,14 +128,6 @@ default List> getQueries() { return emptyList(); } - /** - * @return Applicable {@link QueryRewriteInterceptor}s configured for this plugin. - * Note: This is internal to Elasticsearch's API and not extensible by external plugins. - */ - default List getQueryRewriteInterceptors() { - return emptyList(); - } - /** * The new {@link Aggregation}s added by this plugin. 
*/ diff --git a/server/src/main/java/org/elasticsearch/plugins/internal/InternalSearchPlugin.java b/server/src/main/java/org/elasticsearch/plugins/internal/InternalSearchPlugin.java new file mode 100644 index 0000000000000..7ac18c4640a0b --- /dev/null +++ b/server/src/main/java/org/elasticsearch/plugins/internal/InternalSearchPlugin.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.plugins.internal; + +import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; + +import java.util.List; + +import static java.util.Collections.emptyList; + +public interface InternalSearchPlugin { + + /** + * @return Applicable {@link QueryRewriteInterceptor}s configured for this plugin. + * Note: This is internal to Elasticsearch's API and not extensible by external plugins. 
+ */ + default List getQueryRewriteInterceptors() { + return emptyList(); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java index ec7bc05e3328a..c8b364c08bec5 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/metrics/SumAggregator.java @@ -67,18 +67,33 @@ public void collect(int doc, long bucket) throws IOException { @Override protected LeafBucketCollector getLeafCollector(NumericDoubleValues values, final LeafBucketCollector sub) { - final CompensatedSum kahanSummation = new CompensatedSum(0, 0); return new LeafBucketCollectorBase(sub, values) { @Override public void collect(int doc, long bucket) throws IOException { if (values.advanceExact(doc)) { maybeGrow(bucket); + var sums = SumAggregator.this.sums; // Compute the sum of double values with Kahan summation algorithm which is more // accurate than naive summation. - kahanSummation.reset(sums.get(bucket), compensations.get(bucket)); - kahanSummation.add(values.doubleValue()); - compensations.set(bucket, kahanSummation.delta()); - sums.set(bucket, kahanSummation.value()); + double value = sums.get(bucket); + // If the value is Inf or NaN, just add it to the running tally to "convert" to + // Inf/NaN. 
This keeps the behavior bwc from before kahan summing + double v = values.doubleValue(); + if (Double.isFinite(v) == false) { + value = v + value; + } + + if (Double.isFinite(value)) { + var compensations = SumAggregator.this.compensations; + double delta = compensations.get(bucket); + double correctedSum = v + delta; + double updatedValue = value + correctedSum; + delta = correctedSum - (updatedValue - value); + value = updatedValue; + compensations.set(bucket, delta); + } + + sums.set(bucket, value); } } }; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java index 94505d3096dec..82f593817b161 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/GeoContextMapping.java @@ -184,7 +184,7 @@ public Set parseContext(LuceneDocument document) { if (field instanceof StringField) { spare.resetFromString(field.stringValue()); geohashes.add(spare.geohash()); - } else if (field instanceof LatLonDocValuesField) { + } else if (field instanceof LatLonDocValuesField || field instanceof GeoPointFieldMapper.LatLonPointWithDocValues) { spare.resetFromEncoded(field.numericValue().longValue()); geohashes.add(spare.geohash()); } else if (field instanceof LatLonPoint) { diff --git a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java index debe3d6e6bd92..0bdf39142f766 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java +++ b/server/src/main/java/org/elasticsearch/snapshots/RestoreService.java @@ -426,7 +426,7 @@ private void startRestore( if (DataStream.isFailureStoreFeatureFlagEnabled()) { failureIndices = dataStreamsToRestore.values() .stream() - .flatMap(ds -> 
ds.getFailureIndices().getIndices().stream().map(idx -> new Tuple<>(ds.isSystem(), idx.getName()))) + .flatMap(ds -> ds.getFailureIndices().stream().map(idx -> new Tuple<>(ds.isSystem(), idx.getName()))) .collect(Collectors.partitioningBy(Tuple::v1, Collectors.mapping(Tuple::v2, Collectors.toSet()))); } systemDataStreamIndices = Sets.union(backingIndices.getOrDefault(true, Set.of()), failureIndices.getOrDefault(true, Set.of())); @@ -773,7 +773,7 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad .map(i -> metadata.get(renameIndex(i.getName(), request, true, false)).getIndex()) .toList(); List updatedFailureIndices = DataStream.isFailureStoreFeatureFlagEnabled() - ? dataStream.getFailureIndices() + ? dataStream.getFailureComponent() .getIndices() .stream() .map(i -> metadata.get(renameIndex(i.getName(), request, false, true)).getIndex()) @@ -781,8 +781,8 @@ static DataStream updateDataStream(DataStream dataStream, Metadata.Builder metad : List.of(); return dataStream.copy() .setName(dataStreamName) - .setBackingIndices(dataStream.getBackingIndices().copy().setIndices(updatedIndices).build()) - .setFailureIndices(dataStream.getFailureIndices().copy().setIndices(updatedFailureIndices).build()) + .setBackingIndices(dataStream.getDataComponent().copy().setIndices(updatedIndices).build()) + .setFailureIndices(dataStream.getFailureComponent().copy().setIndices(updatedFailureIndices).build()) .build(); } diff --git a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java index 72d0a1330a249..a3d3af70de28a 100644 --- a/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java +++ b/server/src/main/java/org/elasticsearch/synonyms/SynonymsManagementAPIService.java @@ -565,7 +565,7 @@ private static String internalSynonymRuleId(String synonymsSetId, String synonym static Settings settings() { return 
Settings.builder() .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-all") + .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1") .put(IndexMetadata.INDEX_FORMAT_SETTING.getKey(), SYNONYMS_INDEX_FORMAT) .build(); } diff --git a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java index c341b6e2a541c..e5fcfc37de3f0 100644 --- a/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java +++ b/server/src/main/java/org/elasticsearch/transport/OutboundHandler.java @@ -15,6 +15,7 @@ import org.apache.lucene.store.AlreadyClosedException; import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.bytes.BytesReference; @@ -27,6 +28,7 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.core.UpdateForV10; import org.elasticsearch.threadpool.ThreadPool; import java.io.IOException; @@ -38,7 +40,10 @@ final class OutboundHandler { private static final Logger logger = LogManager.getLogger(OutboundHandler.class); private final String nodeName; + + @UpdateForV10(owner = UpdateForV10.Owner.DISTRIBUTED_COORDINATION) // only used in assertions, can be dropped in future private final TransportVersion version; + private final StatsTracker statsTracker; private final ThreadPool threadPool; private final Recycler recycler; @@ -98,11 +103,11 @@ void sendRequest( final Compression.Scheme compressionScheme, final boolean isHandshake ) throws IOException, TransportException { - TransportVersion version = TransportVersion.min(this.version, transportVersion); - OutboundMessage.Request message = new OutboundMessage.Request( + assert 
assertValidTransportVersion(transportVersion); + final OutboundMessage.Request message = new OutboundMessage.Request( threadPool.getThreadContext(), request, - version, + transportVersion, action, requestId, isHandshake, @@ -137,11 +142,11 @@ void sendResponse( final boolean isHandshake, final ResponseStatsConsumer responseStatsConsumer ) { - TransportVersion version = TransportVersion.min(this.version, transportVersion); + assert assertValidTransportVersion(transportVersion); OutboundMessage.Response message = new OutboundMessage.Response( threadPool.getThreadContext(), response, - version, + transportVersion, requestId, isHandshake, compressionScheme @@ -158,7 +163,11 @@ void sendResponse( } catch (Exception ex) { if (isHandshake) { logger.error( - () -> format("Failed to send handshake response version [%s] received on [%s], closing channel", version, channel), + () -> format( + "Failed to send handshake response version [%s] received on [%s], closing channel", + transportVersion, + channel + ), ex ); channel.close(); @@ -179,9 +188,15 @@ void sendErrorResponse( final ResponseStatsConsumer responseStatsConsumer, final Exception error ) { - TransportVersion version = TransportVersion.min(this.version, transportVersion); - RemoteTransportException tx = new RemoteTransportException(nodeName, channel.getLocalAddress(), action, error); - OutboundMessage.Response message = new OutboundMessage.Response(threadPool.getThreadContext(), tx, version, requestId, false, null); + assert assertValidTransportVersion(transportVersion); + OutboundMessage.Response message = new OutboundMessage.Response( + threadPool.getThreadContext(), + new RemoteTransportException(nodeName, channel.getLocalAddress(), action, error), + transportVersion, + requestId, + false, + null + ); try { sendMessage(channel, message, responseStatsConsumer, () -> messageListener.onResponseSent(requestId, action, error)); } catch (Exception sendException) { @@ -297,4 +312,10 @@ public boolean rstOnClose() { 
return rstOnClose; } + private boolean assertValidTransportVersion(TransportVersion transportVersion) { + assert this.version.before(TransportVersions.MINIMUM_COMPATIBLE) // running an incompatible-version test + || this.version.onOrAfter(transportVersion) : this.version + " vs " + transportVersion; + return true; + } + } diff --git a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java index c4fdf05f5640c..4eb16f327a5e7 100644 --- a/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java +++ b/server/src/main/java/org/elasticsearch/transport/TransportHandshaker.java @@ -257,7 +257,7 @@ public void handleResponse(HandshakeResponse response) { ) ); } else { - listener.onResponse(responseVersion); + listener.onResponse(TransportVersion.min(TransportHandshaker.this.version, response.getResponseVersion())); } } } diff --git a/server/src/main/resources/org/elasticsearch/TransportVersions.csv b/server/src/main/resources/org/elasticsearch/TransportVersions.csv index 2016f59b58a3e..81d9bf5cb30a4 100644 --- a/server/src/main/resources/org/elasticsearch/TransportVersions.csv +++ b/server/src/main/resources/org/elasticsearch/TransportVersions.csv @@ -136,4 +136,6 @@ 8.16.0,8772001 8.16.1,8772004 8.16.2,8772004 +8.16.3,8772004 8.17.0,8797002 +8.17.1,8797002 diff --git a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt index 69aa5102dec8d..10ef0c63e71c1 100644 --- a/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt +++ b/server/src/main/resources/org/elasticsearch/common/reference-docs-links.txt @@ -45,3 +45,4 @@ CIRCUIT_BREAKER_ERRORS circuit-breaker- ALLOCATION_EXPLAIN_NO_COPIES cluster-allocation-explain.html#no-valid-shard-copy ALLOCATION_EXPLAIN_MAX_RETRY cluster-allocation-explain.html#maximum-number-of-retries-exceeded 
SECURE_SETTINGS secure-settings.html +CLUSTER_SHARD_LIMIT misc-cluster-settings.html#cluster-shard-limit diff --git a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv index 3bfeeded6494c..874d9fe3d0150 100644 --- a/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv +++ b/server/src/main/resources/org/elasticsearch/index/IndexVersions.csv @@ -136,4 +136,6 @@ 8.16.0,8518000 8.16.1,8518000 8.16.2,8518000 +8.16.3,8518000 8.17.0,8521000 +8.17.1,8521000 diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index 42984dc9da828..57750bb02bb14 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -673,9 +673,7 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception Metadata.Builder builder = Metadata.builder(); builder.put("template", template); dataStream.getIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); - dataStream.getFailureIndices() - .getIndices() - .forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); + dataStream.getFailureIndices().forEach(index -> builder.put(DataStreamTestHelper.getIndexMetadataBuilderForIndex(index))); builder.put(dataStream); final ClusterState clusterState = ClusterState.builder(new ClusterName("test")).metadata(builder).build(); final TestTelemetryPlugin telemetryPlugin = new TestTelemetryPlugin(); @@ -716,19 +714,16 @@ public void testRolloverClusterStateForDataStreamFailureStore() throws Exception assertEquals(sourceIndexName, rolloverResult.sourceIndexName()); 
assertEquals(newIndexName, rolloverResult.rolloverIndexName()); Metadata rolloverMetadata = rolloverResult.clusterState().metadata(); - assertEquals( - dataStream.getIndices().size() + dataStream.getFailureIndices().getIndices().size() + 1, - rolloverMetadata.indices().size() - ); + assertEquals(dataStream.getIndices().size() + dataStream.getFailureIndices().size() + 1, rolloverMetadata.indices().size()); IndexMetadata rolloverIndexMetadata = rolloverMetadata.index(newIndexName); var ds = (DataStream) rolloverMetadata.getIndicesLookup().get(dataStream.getName()); assertThat(ds.getType(), equalTo(IndexAbstraction.Type.DATA_STREAM)); assertThat(ds.getIndices(), hasSize(dataStream.getIndices().size())); - assertThat(ds.getFailureIndices().getIndices(), hasSize(dataStream.getFailureIndices().getIndices().size() + 1)); - assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); - assertThat(ds.getFailureIndices().getIndices(), hasItem(rolloverIndexMetadata.getIndex())); - assertThat(ds.getFailureStoreWriteIndex(), equalTo(rolloverIndexMetadata.getIndex())); + assertThat(ds.getFailureIndices(), hasSize(dataStream.getFailureIndices().size() + 1)); + assertThat(ds.getFailureIndices(), hasItem(rolloverMetadata.index(sourceIndexName).getIndex())); + assertThat(ds.getFailureIndices(), hasItem(rolloverIndexMetadata.getIndex())); + assertThat(ds.getWriteFailureIndex(), equalTo(rolloverIndexMetadata.getIndex())); RolloverInfo info = rolloverMetadata.index(sourceIndexName).getRolloverInfos().get(dataStream.getName()); assertThat(info.getTime(), lessThanOrEqualTo(after)); @@ -753,7 +748,7 @@ public void testValidation() throws Exception { .promoteDataStream(); rolloverTarget = dataStream.getName(); if (dataStream.isFailureStoreExplicitlyEnabled() && randomBoolean()) { - sourceIndexName = dataStream.getFailureStoreWriteIndex().getName(); + sourceIndexName = dataStream.getWriteFailureIndex().getName(); isFailureStoreRollover = 
true; defaultRolloverIndexName = DataStream.getDefaultFailureStoreName( dataStream.getName(), diff --git a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java index 9360ce1719634..6155e11a127ec 100644 --- a/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java +++ b/server/src/test/java/org/elasticsearch/action/bulk/BulkOperationTests.java @@ -852,7 +852,10 @@ public void testLazilyRollingOverFailureStore() throws Exception { DataStream rolledOverDataStream = dataStream3.copy() .setFailureIndices( - dataStream3.getFailureIndices().copy().setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())).build() + dataStream3.getFailureComponent() + .copy() + .setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())) + .build() ) .build(); Metadata metadata = Metadata.builder(DEFAULT_STATE.metadata()) @@ -897,7 +900,10 @@ public void testFailureWhileRollingOverFailureStore() throws Exception { DataStream rolledOverDataStream = dataStream3.copy() .setFailureIndices( - dataStream3.getFailureIndices().copy().setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())).build() + dataStream3.getFailureComponent() + .copy() + .setIndices(List.of(ds3FailureStore1.getIndex(), ds3FailureStore2.getIndex())) + .build() ) .build(); Metadata metadata = Metadata.builder(DEFAULT_STATE.metadata()) diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java index f7f299683c3fc..0b3ad270a9d2b 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/DataStreamTests.java @@ -101,11 +101,11 @@ protected DataStream mutateInstance(DataStream instance) { var indexMode = instance.getIndexMode(); var lifecycle = 
instance.getLifecycle(); var dataStreamOptions = instance.getDataStreamOptions(); - var failureIndices = instance.getFailureIndices().getIndices(); + var failureIndices = instance.getFailureIndices(); var rolloverOnWrite = instance.rolloverOnWrite(); var autoShardingEvent = instance.getAutoShardingEvent(); - var failureRolloverOnWrite = instance.getFailureIndices().isRolloverOnWrite(); - var failureAutoShardingEvent = instance.getBackingIndices().getAutoShardingEvent(); + var failureRolloverOnWrite = instance.getFailureComponent().isRolloverOnWrite(); + var failureAutoShardingEvent = instance.getDataComponent().getAutoShardingEvent(); switch (between(0, 15)) { case 0 -> name = randomAlphaOfLength(10); case 1 -> indices = randomNonEmptyIndexInstances(); @@ -197,7 +197,7 @@ protected DataStream mutateInstance(DataStream instance) { public void testRollover() { DataStream ds = DataStreamTestHelper.randomInstance().promoteDataStream(); - Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getDataComponent()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); @@ -224,7 +224,7 @@ public void testRolloverWithConflictingBackingIndexName() { builder.put(im, false); } - final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build(), ds.getBackingIndices()); + final Tuple newCoordinates = ds.nextWriteIndexAndGeneration(builder.build(), ds.getDataComponent()); final DataStream rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 
numConflictingIndices + 1)); @@ -240,7 +240,7 @@ public void testRolloverUpgradeToTsdbDataStream() { .setReplicated(false) .setIndexMode(randomBoolean() ? IndexMode.STANDARD : null) .build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getDataComponent()); var rolledDs = ds.rollover( new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), @@ -262,7 +262,7 @@ public void testRolloverUpgradeToLogsdbDataStream() { .setReplicated(false) .setIndexMode(randomBoolean() ? IndexMode.STANDARD : null) .build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getDataComponent()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), IndexMode.LOGSDB, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -275,7 +275,7 @@ public void testRolloverUpgradeToLogsdbDataStream() { public void testRolloverDowngradeFromTsdbToRegularDataStream() { DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.TIME_SERIES).build(); - var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getDataComponent()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -288,7 +288,7 @@ public void testRolloverDowngradeFromTsdbToRegularDataStream() { public void testRolloverDowngradeFromLogsdbToRegularDataStream() { DataStream ds = DataStreamTestHelper.randomInstance().copy().setReplicated(false).setIndexMode(IndexMode.LOGSDB).build(); - var newCoordinates = 
ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getBackingIndices()); + var newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getDataComponent()); var rolledDs = ds.rollover(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2(), null, null); assertThat(rolledDs.getName(), equalTo(ds.getName())); @@ -301,18 +301,18 @@ public void testRolloverDowngradeFromLogsdbToRegularDataStream() { public void testRolloverFailureStore() { DataStream ds = DataStreamTestHelper.randomInstance(true).promoteDataStream(); - Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getFailureIndices()); + Tuple newCoordinates = ds.nextWriteIndexAndGeneration(Metadata.EMPTY_METADATA, ds.getFailureComponent()); final DataStream rolledDs = ds.rolloverFailureStore(new Index(newCoordinates.v1(), UUIDs.randomBase64UUID()), newCoordinates.v2()); assertThat(rolledDs.getName(), equalTo(ds.getName())); assertThat(rolledDs.getGeneration(), equalTo(ds.getGeneration() + 1)); assertThat(rolledDs.getIndices().size(), equalTo(ds.getIndices().size())); // Ensure that the rolloverOnWrite flag hasn't changed when rolling over a failure store. 
assertThat(rolledDs.rolloverOnWrite(), equalTo(ds.rolloverOnWrite())); - assertThat(rolledDs.getFailureIndices().getIndices().size(), equalTo(ds.getFailureIndices().getIndices().size() + 1)); + assertThat(rolledDs.getFailureIndices().size(), equalTo(ds.getFailureIndices().size() + 1)); assertTrue(rolledDs.getIndices().containsAll(ds.getIndices())); assertTrue(rolledDs.getIndices().contains(rolledDs.getWriteIndex())); - assertTrue(rolledDs.getFailureIndices().getIndices().containsAll(ds.getFailureIndices().getIndices())); - assertTrue(rolledDs.getFailureIndices().getIndices().contains(rolledDs.getFailureStoreWriteIndex())); + assertTrue(rolledDs.getFailureIndices().containsAll(ds.getFailureIndices())); + assertTrue(rolledDs.getFailureIndices().contains(rolledDs.getWriteFailureIndex())); } public void testRemoveBackingIndex() { @@ -361,18 +361,15 @@ public void testRemoveBackingWriteIndex() { public void testRemoveFailureStoreIndex() { DataStream original = createRandomDataStream(); - int indexToRemove = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1); + int indexToRemove = randomIntBetween(1, original.getFailureIndices().size() - 1); - DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().getIndices().get(indexToRemove - 1)); + DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().get(indexToRemove - 1)); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() - 1)); - for (int k = 0; k < (original.getFailureIndices().getIndices().size() - 1); k++) { - assertThat( - updated.getFailureIndices().getIndices().get(k), - equalTo(original.getFailureIndices().getIndices().get(k < (indexToRemove - 1) ? 
k : k + 1)) - ); + assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() - 1)); + for (int k = 0; k < (original.getFailureIndices().size() - 1); k++) { + assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k < (indexToRemove - 1) ? k : k + 1))); } } @@ -389,16 +386,16 @@ public void testRemoveFailureStoreIndexThatDoesNotExist() { public void testRemoveFailureStoreWriteIndex() { DataStream original = createRandomDataStream(); - int indexToRemove = original.getFailureIndices().getIndices().size() - 1; + int indexToRemove = original.getFailureIndices().size() - 1; - DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().getIndices().get(indexToRemove)); + DataStream updated = original.removeFailureStoreIndex(original.getFailureIndices().get(indexToRemove)); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size() - 1)); - assertThat(updated.getFailureIndices().isRolloverOnWrite(), equalTo(true)); - for (int k = 0; k < (original.getFailureIndices().getIndices().size() - 1); k++) { - assertThat(updated.getFailureIndices().getIndices().get(k), equalTo(original.getFailureIndices().getIndices().get(k))); + assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() - 1)); + assertThat(updated.getFailureComponent().isRolloverOnWrite(), equalTo(true)); + for (int k = 0; k < (original.getFailureIndices().size() - 1); k++) { + assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k))); } } @@ -440,9 +437,9 @@ public void testAddBackingIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, 
ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -470,11 +467,11 @@ public void testAddBackingIndexThatIsPartOfDataStreamFailureStore() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> ds1.addBackingIndex(builder.build(), indexToAdd)); assertThat( @@ -559,7 +556,7 @@ public void testAddFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices().getIndices()); + createMetadataForIndices(builder, original.getFailureIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); builder.put( @@ -575,11 +572,11 @@ public void testAddFailureStoreIndex() { assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().getIndices().size(), 
equalTo(original.getFailureIndices().getIndices().size() + 1)); - for (int k = 1; k <= original.getFailureIndices().getIndices().size(); k++) { - assertThat(updated.getFailureIndices().getIndices().get(k), equalTo(original.getFailureIndices().getIndices().get(k - 1))); + assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size() + 1)); + for (int k = 1; k <= original.getFailureIndices().size(); k++) { + assertThat(updated.getFailureIndices().get(k), equalTo(original.getFailureIndices().get(k - 1))); } - assertThat(updated.getFailureIndices().getIndices().get(0), equalTo(indexToAdd)); + assertThat(updated.getFailureIndices().get(0), equalTo(indexToAdd)); } public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { @@ -591,11 +588,11 @@ public void testAddFailureStoreIndexThatIsPartOfAnotherDataStream() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds2.getFailureIndices()); - Index indexToAdd = randomFrom(ds2.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(ds2.getFailureIndices().toArray(Index.EMPTY_ARRAY)); IllegalArgumentException e = expectThrows( IllegalArgumentException.class, @@ -624,9 +621,9 @@ public void testAddFailureStoreIndexThatIsPartOfDataStreamBackingIndices() { builder.put(ds2); createMetadataForIndices(builder, ds1.getIndices()); - createMetadataForIndices(builder, ds1.getFailureIndices().getIndices()); + createMetadataForIndices(builder, ds1.getFailureIndices()); createMetadataForIndices(builder, ds2.getIndices()); - createMetadataForIndices(builder, ds2.getFailureIndices().getIndices()); + createMetadataForIndices(builder, 
ds2.getFailureIndices()); Index indexToAdd = randomFrom(ds2.getIndices().toArray(Index.EMPTY_ARRAY)); @@ -655,16 +652,16 @@ public void testAddExistingFailureStoreIndex() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices().getIndices()); + createMetadataForIndices(builder, original.getFailureIndices()); - Index indexToAdd = randomFrom(original.getFailureIndices().getIndices().toArray(Index.EMPTY_ARRAY)); + Index indexToAdd = randomFrom(original.getFailureIndices().toArray(Index.EMPTY_ARRAY)); DataStream updated = original.addFailureStoreIndex(builder.build(), indexToAdd); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration())); assertThat(updated.getIndices().size(), equalTo(original.getIndices().size())); - assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size())); - assertThat(updated.getFailureIndices().getIndices(), equalTo(original.getFailureIndices().getIndices())); + assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size())); + assertThat(updated.getFailureIndices(), equalTo(original.getFailureIndices())); } public void testAddFailureStoreIndexWithAliases() { @@ -674,7 +671,7 @@ public void testAddFailureStoreIndexWithAliases() { builder.put(original); createMetadataForIndices(builder, original.getIndices()); - createMetadataForIndices(builder, original.getFailureIndices().getIndices()); + createMetadataForIndices(builder, original.getFailureIndices()); Index indexToAdd = new Index(randomAlphaOfLength(4), UUIDs.randomBase64UUID(random())); IndexMetadata.Builder b = IndexMetadata.builder(indexToAdd.getName()) @@ -762,9 +759,9 @@ public void testReplaceBackingIndexThrowsExceptionIfIndexNotPartOfDataStream() { public void testReplaceBackingIndexThrowsExceptionIfIndexPartOfFailureStore() { DataStream 
original = createRandomDataStream(); - int indexToReplace = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1) - 1; + int indexToReplace = randomIntBetween(1, original.getFailureIndices().size() - 1) - 1; - Index failureIndex = original.getFailureIndices().getIndices().get(indexToReplace); + Index failureIndex = original.getFailureIndices().get(indexToReplace); Index newBackingIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random())); expectThrows(IllegalArgumentException.class, () -> original.replaceBackingIndex(failureIndex, newBackingIndex)); } @@ -801,21 +798,18 @@ public void testReplaceBackingIndexThrowsExceptionIfReplacingWriteIndex() { public void testReplaceFailureIndex() { DataStream original = createRandomDataStream(); - int indexToReplace = randomIntBetween(1, original.getFailureIndices().getIndices().size() - 1) - 1; + int indexToReplace = randomIntBetween(1, original.getFailureIndices().size() - 1) - 1; Index newFailureIndex = new Index("replacement-index", UUIDs.randomBase64UUID(random())); - DataStream updated = original.replaceFailureStoreIndex( - original.getFailureIndices().getIndices().get(indexToReplace), - newFailureIndex - ); + DataStream updated = original.replaceFailureStoreIndex(original.getFailureIndices().get(indexToReplace), newFailureIndex); assertThat(updated.getName(), equalTo(original.getName())); assertThat(updated.getGeneration(), equalTo(original.getGeneration() + 1)); - assertThat(updated.getFailureIndices().getIndices().size(), equalTo(original.getFailureIndices().getIndices().size())); - assertThat(updated.getFailureIndices().getIndices().get(indexToReplace), equalTo(newFailureIndex)); + assertThat(updated.getFailureIndices().size(), equalTo(original.getFailureIndices().size())); + assertThat(updated.getFailureIndices().get(indexToReplace), equalTo(newFailureIndex)); - for (int i = 0; i < original.getFailureIndices().getIndices().size(); i++) { + for (int i = 0; i < 
original.getFailureIndices().size(); i++) { if (i != indexToReplace) { - assertThat(updated.getFailureIndices().getIndices().get(i), equalTo(original.getFailureIndices().getIndices().get(i))); + assertThat(updated.getFailureIndices().get(i), equalTo(original.getFailureIndices().get(i))); } } } @@ -886,23 +880,23 @@ public void testSnapshot() { postSnapshotBackingIndices.addAll(backingIndicesToAdd); // Mutate failure indices - var failureIndicesToRemove = randomSubsetOf(preSnapshotDataStream.getFailureIndices().getIndices()); + var failureIndicesToRemove = randomSubsetOf(preSnapshotDataStream.getFailureIndices()); var failureIndicesToAdd = randomIndexInstances(); - var postSnapshotFailureIndices = new ArrayList<>(preSnapshotDataStream.getFailureIndices().getIndices()); + var postSnapshotFailureIndices = new ArrayList<>(preSnapshotDataStream.getFailureIndices()); postSnapshotFailureIndices.removeAll(failureIndicesToRemove); postSnapshotFailureIndices.addAll(failureIndicesToAdd); var replicated = preSnapshotDataStream.isReplicated() && randomBoolean(); var postSnapshotDataStream = preSnapshotDataStream.copy() .setBackingIndices( - preSnapshotDataStream.getBackingIndices() + preSnapshotDataStream.getDataComponent() .copy() .setIndices(postSnapshotBackingIndices) .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) .build() ) .setFailureIndices( - preSnapshotDataStream.getFailureIndices() + preSnapshotDataStream.getFailureComponent() .copy() .setIndices(postSnapshotFailureIndices) .setRolloverOnWrite(replicated == false && preSnapshotDataStream.rolloverOnWrite()) @@ -915,7 +909,7 @@ public void testSnapshot() { Set indicesInSnapshot = new HashSet<>(); preSnapshotDataStream.getIndices().forEach(index -> indicesInSnapshot.add(index.getName())); - preSnapshotDataStream.getFailureIndices().getIndices().forEach(index -> indicesInSnapshot.add(index.getName())); + preSnapshotDataStream.getFailureIndices().forEach(index -> 
indicesInSnapshot.add(index.getName())); var reconciledDataStream = postSnapshotDataStream.snapshot(indicesInSnapshot, Metadata.builder()); assertThat(reconciledDataStream.getName(), equalTo(postSnapshotDataStream.getName())); @@ -936,12 +930,12 @@ public void testSnapshot() { reconciledDataStream.getIndices().size(), equalTo(preSnapshotDataStream.getIndices().size() - backingIndicesToRemove.size()) ); - var reconciledFailureIndices = reconciledDataStream.getFailureIndices().getIndices(); + var reconciledFailureIndices = reconciledDataStream.getFailureIndices(); assertThat(reconciledFailureIndices, everyItem(not(in(failureIndicesToRemove)))); assertThat(reconciledFailureIndices, everyItem(not(in(failureIndicesToAdd)))); assertThat( reconciledFailureIndices.size(), - equalTo(preSnapshotDataStream.getFailureIndices().getIndices().size() - failureIndicesToRemove.size()) + equalTo(preSnapshotDataStream.getFailureIndices().size() - failureIndicesToRemove.size()) ); } @@ -950,7 +944,7 @@ public void testSnapshotWithAllBackingIndicesRemoved() { var indicesToAdd = randomNonEmptyIndexInstances(); var postSnapshotDataStream = preSnapshotDataStream.copy() - .setBackingIndices(preSnapshotDataStream.getBackingIndices().copy().setIndices(indicesToAdd).build()) + .setBackingIndices(preSnapshotDataStream.getDataComponent().copy().setIndices(indicesToAdd).build()) .build(); assertNull( @@ -2139,7 +2133,7 @@ public void testWriteFailureIndex() { replicated == false && randomBoolean(), null ); - assertThat(noFailureStoreDataStream.getFailureStoreWriteIndex(), nullValue()); + assertThat(noFailureStoreDataStream.getWriteFailureIndex(), nullValue()); DataStream failureStoreDataStreamWithEmptyFailureIndices = new DataStream( randomAlphaOfLength(10), @@ -2157,7 +2151,7 @@ public void testWriteFailureIndex() { replicated == false && randomBoolean(), null ); - assertThat(failureStoreDataStreamWithEmptyFailureIndices.getFailureStoreWriteIndex(), nullValue()); + 
assertThat(failureStoreDataStreamWithEmptyFailureIndices.getWriteFailureIndex(), nullValue()); List failureIndices = randomIndexInstances(); String dataStreamName = randomAlphaOfLength(10); @@ -2182,7 +2176,7 @@ public void testWriteFailureIndex() { replicated == false && randomBoolean(), null ); - assertThat(failureStoreDataStream.getFailureStoreWriteIndex(), is(writeFailureIndex)); + assertThat(failureStoreDataStream.getWriteFailureIndex(), is(writeFailureIndex)); } public void testIsFailureIndex() { diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java new file mode 100644 index 0000000000000..abcd65c3d2d25 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexAbstractionTests.java @@ -0,0 +1,211 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.cluster.metadata; + +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +import static org.hamcrest.CoreMatchers.nullValue; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; + +public class IndexAbstractionTests extends ESTestCase { + + public void testIndexAbstractionsDoNotSupportFailureIndices() { + AliasMetadata aliasMetadata = AliasMetadata.builder("my-alias").build(); + IndexMetadata standaloneIndexMetadata = newIndexMetadata("my-index", aliasMetadata); + IndexMetadata backingIndexMetadata = newIndexMetadata(".ds-my-ds", null); + IndexMetadata failureIndexMetadata = newIndexMetadata(".fs-my-ds", null); + DataStream dataStreamWithFs = newDataStreamInstance( + List.of(backingIndexMetadata.getIndex()), + List.of(failureIndexMetadata.getIndex()) + ); + Metadata metadata = Metadata.builder() + .put(standaloneIndexMetadata, false) + .put(backingIndexMetadata, false) + .put(failureIndexMetadata, false) + .dataStreams(Map.of(dataStreamWithFs.getName(), dataStreamWithFs), Map.of()) + .build(); + + // Concrete indices do not support failure store + + IndexAbstraction standaloneIndex = new IndexAbstraction.ConcreteIndex(standaloneIndexMetadata); + assertThat(standaloneIndex.getWriteIndex(), equalTo(standaloneIndexMetadata.getIndex())); + assertThat(standaloneIndex.getWriteFailureIndex(metadata), nullValue()); + assertThat(standaloneIndex.getFailureIndices(metadata), empty()); + + // Even if they belong to a data stream + IndexAbstraction backingIndex = new IndexAbstraction.ConcreteIndex(backingIndexMetadata, dataStreamWithFs); + 
assertThat(backingIndex.getWriteIndex(), equalTo(backingIndexMetadata.getIndex())); + assertThat(backingIndex.getWriteFailureIndex(metadata), nullValue()); + assertThat(backingIndex.getFailureIndices(metadata), empty()); + + IndexAbstraction failureIndex = new IndexAbstraction.ConcreteIndex(failureIndexMetadata, dataStreamWithFs); + assertThat(failureIndex.getWriteIndex(), equalTo(failureIndexMetadata.getIndex())); + assertThat(failureIndex.getWriteFailureIndex(metadata), nullValue()); + assertThat(failureIndex.getFailureIndices(metadata), empty()); + + // Aliases of standalone indices also do not support the failure store + List referenceIndices = List.of(standaloneIndexMetadata); + IndexAbstraction alias = new IndexAbstraction.Alias(aliasMetadata, referenceIndices); + assertThat(alias.getIndices(), containsInAnyOrder(referenceIndices.stream().map(IndexMetadata::getIndex).toArray())); + assertThat(alias.getWriteFailureIndex(metadata), nullValue()); + assertThat(alias.getFailureIndices(metadata), empty()); + } + + public void testIndexAbstractionsWithFailureIndices() { + IndexMetadata backingIndexMetadata = newIndexMetadata(".ds-my-fs", null); + IndexMetadata failureIndexMetadata = newIndexMetadata(".fs-my-fs", null); + IndexMetadata otherBackingIndexMetadata = newIndexMetadata(".ds-my-ds", null); + DataStream dsWithFailureStore = newDataStreamInstance( + List.of(backingIndexMetadata.getIndex()), + List.of(failureIndexMetadata.getIndex()) + ); + DataStream dsWithoutFailureStore = newDataStreamInstance(List.of(otherBackingIndexMetadata.getIndex()), List.of()); + DataStreamAlias aliasWithoutFailureStore = new DataStreamAlias( + "no-fs-alias", + List.of(dsWithoutFailureStore.getName()), + dsWithoutFailureStore.getName(), + Map.of() + ); + DataStreamAlias aliasWithFailureStore = new DataStreamAlias( + "with-fs-alias", + List.of(dsWithoutFailureStore.getName(), dsWithFailureStore.getName()), + dsWithFailureStore.getName(), + Map.of() + ); + DataStreamAlias 
aliasWithoutWriteDataStream = new DataStreamAlias( + "no-write-alias", + List.of(dsWithoutFailureStore.getName(), dsWithFailureStore.getName()), + null, + Map.of() + ); + DataStreamAlias aliasWithoutWriteFailureStoreDataStream = new DataStreamAlias( + "no-write-failure-stote-alias", + List.of(dsWithoutFailureStore.getName(), dsWithFailureStore.getName()), + dsWithoutFailureStore.getName(), + Map.of() + ); + Metadata metadata = Metadata.builder() + .put(otherBackingIndexMetadata, false) + .put(backingIndexMetadata, false) + .put(failureIndexMetadata, false) + .dataStreams( + Map.of(dsWithFailureStore.getName(), dsWithFailureStore, dsWithoutFailureStore.getName(), dsWithoutFailureStore), + Map.of( + aliasWithoutFailureStore.getAlias(), + aliasWithoutFailureStore, + aliasWithFailureStore.getAlias(), + aliasWithFailureStore, + aliasWithoutWriteDataStream.getAlias(), + aliasWithoutWriteDataStream + ) + ) + .build(); + + // Data stream with no failure store + assertThat(dsWithoutFailureStore.getWriteIndex(), equalTo(otherBackingIndexMetadata.getIndex())); + assertThat(dsWithoutFailureStore.getIndices(), contains(otherBackingIndexMetadata.getIndex())); + assertThat(dsWithoutFailureStore.getWriteFailureIndex(), nullValue()); + assertThat(dsWithoutFailureStore.getFailureIndices(), empty()); + + // Data stream with failure store + assertThat(dsWithFailureStore.getWriteIndex(), equalTo(backingIndexMetadata.getIndex())); + assertThat(dsWithFailureStore.getIndices(), contains(backingIndexMetadata.getIndex())); + assertThat(dsWithFailureStore.getWriteFailureIndex(), equalTo(failureIndexMetadata.getIndex())); + assertThat(dsWithFailureStore.getFailureIndices(), contains(failureIndexMetadata.getIndex())); + + // Alias with no write data stream + List referenceIndices = Stream.concat(dsWithFailureStore.getIndices().stream(), dsWithoutFailureStore.getIndices().stream()) + .toList(); + IndexAbstraction aliasWithNoWriteDs = new IndexAbstraction.Alias( + aliasWithoutWriteDataStream, + 
referenceIndices, + null, + List.of(dsWithFailureStore.getName(), dsWithoutFailureStore.getName()) + ); + assertThat(aliasWithNoWriteDs.getWriteIndex(), nullValue()); + assertThat(aliasWithNoWriteDs.getIndices(), containsInAnyOrder(referenceIndices.toArray())); + assertThat(aliasWithNoWriteDs.getWriteFailureIndex(metadata), nullValue()); + assertThat(aliasWithNoWriteDs.getFailureIndices(metadata), contains(failureIndexMetadata.getIndex())); + + // Alias with no failure store + IndexAbstraction aliasWithNoFs = new IndexAbstraction.Alias( + aliasWithoutFailureStore, + dsWithoutFailureStore.getIndices(), + dsWithoutFailureStore.getWriteIndex(), + List.of(dsWithoutFailureStore.getName()) + ); + assertThat(aliasWithNoFs.getWriteIndex(), equalTo(dsWithoutFailureStore.getWriteIndex())); + assertThat(aliasWithNoFs.getIndices(), contains(otherBackingIndexMetadata.getIndex())); + assertThat(aliasWithNoFs.getWriteFailureIndex(metadata), nullValue()); + assertThat(aliasWithNoFs.getFailureIndices(metadata), empty()); + + // Alias with failure store and write ds with failure store + IndexAbstraction aliasWithWriteFs = new IndexAbstraction.Alias( + aliasWithoutWriteDataStream, + referenceIndices, + dsWithFailureStore.getWriteIndex(), + List.of(dsWithFailureStore.getName(), dsWithoutFailureStore.getName()) + ); + assertThat(aliasWithWriteFs.getWriteIndex(), equalTo(backingIndexMetadata.getIndex())); + assertThat(aliasWithWriteFs.getIndices(), containsInAnyOrder(referenceIndices.toArray())); + assertThat(aliasWithWriteFs.getWriteFailureIndex(metadata), equalTo(failureIndexMetadata.getIndex())); + assertThat(aliasWithWriteFs.getFailureIndices(metadata), contains(failureIndexMetadata.getIndex())); + + // Alias with failure store and write ds without failure store + IndexAbstraction aliasWithWithoutWriteFs = new IndexAbstraction.Alias( + aliasWithoutWriteDataStream, + referenceIndices, + dsWithoutFailureStore.getWriteIndex(), + List.of(dsWithFailureStore.getName(), 
dsWithoutFailureStore.getName()) + ); + assertThat(aliasWithWithoutWriteFs.getWriteIndex(), equalTo(otherBackingIndexMetadata.getIndex())); + assertThat(aliasWithWithoutWriteFs.getIndices(), containsInAnyOrder(referenceIndices.toArray())); + assertThat(aliasWithWithoutWriteFs.getWriteFailureIndex(metadata), nullValue()); + assertThat(aliasWithWithoutWriteFs.getFailureIndices(metadata), contains(failureIndexMetadata.getIndex())); + } + + private IndexMetadata newIndexMetadata(String indexName, AliasMetadata aliasMetadata) { + Settings dummyIndexSettings = Settings.builder() + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()) + .put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetadata.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + + IndexMetadata.Builder builder = new IndexMetadata.Builder(indexName).settings(dummyIndexSettings); + if (aliasMetadata != null) { + builder.putAlias(aliasMetadata); + } + return builder.build(); + } + + private static DataStream newDataStreamInstance(List backingIndices, List failureStoreIndices) { + boolean isSystem = randomBoolean(); + return DataStream.builder(randomAlphaOfLength(50), backingIndices) + .setFailureIndices(DataStream.DataStreamIndices.failureIndicesBuilder(failureStoreIndices).build()) + .setGeneration(randomLongBetween(1, 1000)) + .setMetadata(Map.of()) + .setSystem(isSystem) + .setHidden(isSystem || randomBoolean()) + .setReplicated(randomBoolean()) + .build(); + } +} diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java index bb4c843bb7bdf..7cb13027a064d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataCreateDataStreamServiceTests.java @@ 
-333,7 +333,7 @@ public void testCreateDataStreamWithFailureStoreUninitialized() throws Exception assertThat(newState.metadata().dataStreams().get(dataStreamName).isSystem(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).isHidden(), is(false)); assertThat(newState.metadata().dataStreams().get(dataStreamName).isReplicated(), is(false)); - assertThat(newState.metadata().dataStreams().get(dataStreamName).getFailureIndices().getIndices(), empty()); + assertThat(newState.metadata().dataStreams().get(dataStreamName).getFailureIndices(), empty()); assertThat(newState.metadata().index(backingIndexName), notNullValue()); assertThat(newState.metadata().index(backingIndexName).getSettings().get("index.hidden"), equalTo("true")); assertThat(newState.metadata().index(backingIndexName).isSystem(), is(false)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java index 276c20d2d1322..3c8b6ebbfb271 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDataStreamsServiceTests.java @@ -360,7 +360,7 @@ public void testRemoveBrokenBackingIndexReference() { var original = state.getMetadata().dataStreams().get(dataStreamName); var broken = original.copy() .setBackingIndices( - original.getBackingIndices() + original.getDataComponent() .copy() .setIndices(List.of(new Index(original.getIndices().get(0).getName(), "broken"), original.getIndices().get(1))) .build() diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java index 3ada92dbe7ae5..9abbc59d672e2 100644 --- 
a/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetadataDeleteIndexServiceTests.java @@ -304,10 +304,10 @@ public void testDeleteMultipleFailureIndexForDataStream() { DataStream dataStream = after.metadata().dataStreams().get(dataStreamName); assertThat(dataStream, notNullValue()); - assertThat(dataStream.getFailureIndices().getIndices().size(), equalTo(numBackingIndices - indexNumbersToDelete.size())); + assertThat(dataStream.getFailureIndices().size(), equalTo(numBackingIndices - indexNumbersToDelete.size())); for (Index i : indicesToDelete) { assertThat(after.metadata().getIndices().get(i.getName()), nullValue()); - assertFalse(dataStream.getFailureIndices().getIndices().contains(i)); + assertFalse(dataStream.getFailureIndices().contains(i)); } assertThat(after.metadata().getIndices().size(), equalTo((2 * numBackingIndices) - indexNumbersToDelete.size())); } diff --git a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java index 2dabbcb40d012..79b06926cdef6 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java @@ -533,6 +533,10 @@ public void testBWCTransportFormat() throws IOException { } } + /** + * @see TransportVersions#REVERT_BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1 + */ + @AwaitsFix(bugUrl = "https://elasticco.atlassian.net/browse/ES-10585") public void testTwoDigitTransportRoundTrips() throws IOException { TransportVersion tv = TransportVersion.current(); for (var desiredUnit : ByteSizeUnit.values()) { diff --git a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java index 15ef2e150761f..1ab7ca5633e76 
100644 --- a/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java +++ b/server/src/test/java/org/elasticsearch/health/node/ShardsCapacityHealthIndicatorServiceTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; -import org.elasticsearch.features.FeatureService; import org.elasticsearch.health.HealthIndicatorDetails; import org.elasticsearch.health.HealthStatus; import org.elasticsearch.health.metadata.HealthMetadata; @@ -39,7 +38,6 @@ import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; -import org.mockito.Mockito; import java.io.IOException; import java.util.List; @@ -61,10 +59,13 @@ import static org.elasticsearch.indices.ShardLimitValidator.FROZEN_GROUP; import static org.elasticsearch.indices.ShardLimitValidator.INDEX_SETTING_SHARD_LIMIT_GROUP; import static org.elasticsearch.indices.ShardLimitValidator.NORMAL_GROUP; +import static org.elasticsearch.indices.ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE; +import static org.elasticsearch.indices.ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN; +import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; -import static org.mockito.ArgumentMatchers.any; public class ShardsCapacityHealthIndicatorServiceTests extends ESTestCase { @@ -73,7 +74,6 @@ public class ShardsCapacityHealthIndicatorServiceTests extends ESTestCase { private static ThreadPool threadPool; private ClusterService clusterService; - private FeatureService featureService; private DiscoveryNode dataNode; private DiscoveryNode frozenNode; @@ -92,9 +92,6 @@ public void setUp() throws Exception { .build(); clusterService = 
ClusterServiceUtils.createClusterService(threadPool); - - featureService = Mockito.mock(FeatureService.class); - Mockito.when(featureService.clusterHasFeature(any(), any())).thenReturn(true); } @After @@ -122,7 +119,7 @@ public void testNoShardsCapacityMetadata() throws IOException { createIndexInDataNode(100) ) ); - var target = new ShardsCapacityHealthIndicatorService(clusterService, featureService); + var target = new ShardsCapacityHealthIndicatorService(clusterService); var indicatorResult = target.calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), HealthStatus.UNKNOWN); @@ -136,10 +133,7 @@ public void testIndicatorYieldsGreenInCaseThereIsRoom() throws IOException { int maxShardsPerNode = randomValidMaxShards(); int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, maxShardsPerNodeFrozen, createIndexInDataNode(maxShardsPerNode / 4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), HealthStatus.GREEN); assertTrue(indicatorResult.impacts().isEmpty()); @@ -158,15 +152,36 @@ public void testIndicatorYieldsGreenInCaseThereIsRoom() throws IOException { ); } + public void testDiagnoses() { + assertEquals("shards_capacity", SHARDS_MAX_CAPACITY_REACHED_DATA_NODES.definition().indicatorName()); + assertEquals("decrease_shards_per_non_frozen_node", SHARDS_MAX_CAPACITY_REACHED_DATA_NODES.definition().id()); + assertThat( + SHARDS_MAX_CAPACITY_REACHED_DATA_NODES.definition().cause(), + allOf(containsString("maximum number of shards"), containsString(SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey())) + ); + assertThat( + SHARDS_MAX_CAPACITY_REACHED_DATA_NODES.definition().action(), + allOf(containsString("Increase the 
number of nodes in your cluster"), containsString("remove some non-frozen indices")) + ); + + assertEquals("shards_capacity", SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES.definition().indicatorName()); + assertEquals("decrease_shards_per_frozen_node", SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES.definition().id()); + assertThat( + SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES.definition().cause(), + allOf(containsString("maximum number of shards"), containsString(SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN.getKey())) + ); + assertThat( + SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES.definition().action(), + allOf(containsString("Increase the number of nodes in your cluster"), containsString("remove some frozen indices")) + ); + } + public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOException { { // Only data_nodes does not have enough space int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), YELLOW); assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum number of shards for data nodes."); @@ -189,10 +204,7 @@ public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOExcep // Only frozen_nodes does not have enough space int maxShardsPerNode = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, 25, createIndexInFrozenNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new 
ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), YELLOW); assertEquals( @@ -217,10 +229,7 @@ public void testIndicatorYieldsYellowInCaseThereIsNotEnoughRoom() throws IOExcep { // Both data and frozen nodes does not have enough space var clusterService = createClusterService(25, 25, createIndexInDataNode(4), createIndexInFrozenNode(4)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), YELLOW); assertEquals( @@ -251,10 +260,7 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio // Only data_nodes does not have enough space int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), RED); assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum number of shards for data nodes."); @@ -277,10 +283,7 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio // Only frozen_nodes does not have enough space int maxShardsPerNode = randomValidMaxShards(); var clusterService = createClusterService(maxShardsPerNode, 25, createIndexInFrozenNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new 
ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), RED); assertEquals( @@ -305,10 +308,7 @@ public void testIndicatorYieldsRedInCaseThereIsNotEnoughRoom() throws IOExceptio { // Both data and frozen nodes does not have enough space var clusterService = createClusterService(25, 25, createIndexInDataNode(11), createIndexInFrozenNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - true, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(true, HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), RED); assertEquals( @@ -377,11 +377,11 @@ public void testCalculateMethods() { public void testMappedFieldsForTelemetry() { assertEquals(ShardsCapacityHealthIndicatorService.NAME, "shards_capacity"); assertEquals( - "elasticsearch:health:shards_capacity:diagnosis:increase_max_shards_per_node", + "elasticsearch:health:shards_capacity:diagnosis:decrease_shards_per_non_frozen_node", SHARDS_MAX_CAPACITY_REACHED_DATA_NODES.definition().getUniqueId() ); assertEquals( - "elasticsearch:health:shards_capacity:diagnosis:increase_max_shards_per_node_frozen", + "elasticsearch:health:shards_capacity:diagnosis:decrease_shards_per_frozen_node", SHARDS_MAX_CAPACITY_REACHED_FROZEN_NODES.definition().getUniqueId() ); } @@ -389,10 +389,7 @@ public void testMappedFieldsForTelemetry() { public void testSkippingFieldsWhenVerboseIsFalse() { int maxShardsPerNodeFrozen = randomValidMaxShards(); var clusterService = createClusterService(25, maxShardsPerNodeFrozen, createIndexInDataNode(11)); - var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService, featureService).calculate( - false, - HealthInfo.EMPTY_HEALTH_INFO - ); + var indicatorResult = new ShardsCapacityHealthIndicatorService(clusterService).calculate(false, 
HealthInfo.EMPTY_HEALTH_INFO); assertEquals(indicatorResult.status(), RED); assertEquals(indicatorResult.symptom(), "Cluster is close to reaching the configured maximum number of shards for data nodes."); @@ -441,7 +438,7 @@ private ClusterState createClusterState( var metadata = Metadata.builder() .persistentSettings( Settings.builder() - .put(ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), maxShardsPerNode) + .put(SETTING_CLUSTER_MAX_SHARDS_PER_NODE.getKey(), maxShardsPerNode) .put(ShardLimitValidator.SETTING_CLUSTER_MAX_SHARDS_PER_NODE_FROZEN.getKey(), maxShardsPerNodeFrozen) .build() ); diff --git a/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java index e28f0f9e6c785..691ca7682f30c 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSettingsTests.java @@ -10,6 +10,7 @@ package org.elasticsearch.index; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.IndexMetadataVerifier; import org.elasticsearch.common.settings.AbstractScopedSettings; import org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; @@ -17,7 +18,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.mapper.MapperMetrics; +import org.elasticsearch.index.mapper.MapperRegistry; import org.elasticsearch.index.translog.Translog; +import org.elasticsearch.plugins.MapperPlugin; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.index.IndexVersionUtils; import org.hamcrest.Matchers; @@ -39,6 +43,7 @@ import static org.elasticsearch.index.IndexSettings.STATELESS_MIN_NON_FAST_REFRESH_INTERVAL; import static org.elasticsearch.index.IndexSettings.TIME_SERIES_END_TIME; 
import static org.elasticsearch.index.IndexSettings.TIME_SERIES_START_TIME; +import static org.elasticsearch.index.mapper.MapperService.INDEX_MAPPER_DYNAMIC_SETTING; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.core.StringContains.containsString; @@ -839,6 +844,35 @@ public void testTimeSeriesTimeBoundary() { assertThat(e.getMessage(), Matchers.containsString("index.time_series.end_time must be larger than index.time_series.start_time")); } + public void testIndexMapperDynamic() { + Settings settings = Settings.builder().put(INDEX_MAPPER_DYNAMIC_SETTING.getKey(), randomBoolean()).build(); + + INDEX_MAPPER_DYNAMIC_SETTING.get(settings); + assertWarnings( + "[index.mapper.dynamic] setting was deprecated in the previous Elasticsearch release and is removed in this release." + ); + + IndexMetadata idxMetaData = newIndexMeta("test", settings); + IndexMetadataVerifier indexMetadataVerifier = new IndexMetadataVerifier( + Settings.EMPTY, + null, + xContentRegistry(), + new MapperRegistry(Collections.emptyMap(), Collections.emptyMap(), Collections.emptyMap(), MapperPlugin.NOOP_FIELD_FILTER), + IndexScopedSettings.DEFAULT_SCOPED_SETTINGS, + null, + MapperMetrics.NOOP + ); + IndexMetadata verifiedMetaData = indexMetadataVerifier.verifyIndexMetadata( + idxMetaData, + IndexVersions.MINIMUM_COMPATIBLE, + IndexVersions.MINIMUM_READONLY_COMPATIBLE + ); + assertEquals(idxMetaData, verifiedMetaData); + assertWarnings( + "[index.mapper.dynamic] setting was deprecated in the previous Elasticsearch release and is removed in this release." 
+ ); + } + public void testSame() { final var indexSettingKey = "index.example.setting"; final var archivedSettingKey = "archived.example.setting"; diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index d5d9d84673a0e..c9fe314056331 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -401,7 +401,7 @@ public void testGeoHashWithSubCompletionAndStringInsert() throws Exception { ParsedDocument parsedDocument = defaultMapper.parse(source(b -> b.field("field", "drm3btev3e86"))); LuceneDocument indexableFields = parsedDocument.rootDoc(); - assertThat(indexableFields.getFields("field"), hasSize(2)); + assertThat(indexableFields.getFields("field"), hasSize(1)); assertThat(indexableFields.getFields("field.analyzed"), containsInAnyOrder(suggestField("drm3btev3e86"))); // unable to assert about geofield content, covered in a REST test } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java index 0f39a718302e0..aa184ddf465d5 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CopyToMapperTests.java @@ -726,10 +726,10 @@ public void testCopyToGeoPoint() throws Exception { LuceneDocument doc = docMapper.parse(new SourceToParse("1", json, XContentType.JSON)).rootDoc(); List fields = doc.getFields("geopoint"); - assertThat(fields.size(), equalTo(2)); + assertThat(fields.size(), equalTo(1)); fields = doc.getFields("geopoint_copy"); - assertThat(fields.size(), equalTo(2)); + assertThat(fields.size(), equalTo(1)); } } // check failure for object/array type representations diff --git 
a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java index d128b25038a59..5b32350cbf4b8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocumentParserTests.java @@ -10,8 +10,6 @@ package org.elasticsearch.index.mapper; import org.apache.lucene.document.Field; -import org.apache.lucene.document.LatLonDocValuesField; -import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.document.LongField; import org.apache.lucene.document.StringField; import org.apache.lucene.index.IndexableField; @@ -1120,15 +1118,13 @@ public void testWithDynamicTemplates() throws Exception { ParsedDocument doc = mapper.parse(source("1", b -> b.field(field, "41.12,-71.34"), null, Map.of(field, "points"))); List fields = doc.rootDoc().getFields(field); - assertThat(fields, hasSize(2)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(1)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); doc = mapper.parse(source("1", b -> b.field(field, new double[] { -71.34, 41.12 }), null, Map.of(field, "points"))); fields = doc.rootDoc().getFields(field); - assertThat(fields, hasSize(2)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(1)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); doc = mapper.parse(source("1", b -> { b.startObject(field); @@ -1137,16 +1133,13 @@ public void testWithDynamicTemplates() throws Exception { b.endObject(); }, null, Map.of(field, "points"))); fields = 
doc.rootDoc().getFields(field); - assertThat(fields, hasSize(2)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(1)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); doc = mapper.parse(source("1", b -> b.field(field, new String[] { "41.12,-71.34", "43,-72.34" }), null, Map.of(field, "points"))); fields = doc.rootDoc().getFields(field); - assertThat(fields, hasSize(4)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); - assertThat(fields.get(2).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(3).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(2)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); + assertThat(fields.get(1).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); doc = mapper.parse(source("1", b -> { b.startArray(field); @@ -1162,11 +1155,9 @@ public void testWithDynamicTemplates() throws Exception { b.endArray(); }, null, Map.of(field, "points"))); fields = doc.rootDoc().getFields(field); - assertThat(fields, hasSize(4)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); - assertThat(fields.get(2).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(3).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(2)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); + assertThat(fields.get(1).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); doc = mapper.parse(source("1", b -> { 
b.startObject("address"); @@ -1174,9 +1165,8 @@ public void testWithDynamicTemplates() throws Exception { b.endObject(); }, null, Map.of("address.home", "points"))); fields = doc.rootDoc().getFields("address.home"); - assertThat(fields, hasSize(2)); - assertThat(fields.get(0).fieldType(), sameInstance(LatLonPoint.TYPE)); - assertThat(fields.get(1).fieldType(), sameInstance(LatLonDocValuesField.TYPE)); + assertThat(fields, hasSize(1)); + assertThat(fields.get(0).fieldType(), sameInstance(GeoPointFieldMapper.LatLonPointWithDocValues.TYPE)); } public void testDynamicTemplatesNotFound() throws Exception { @@ -3003,7 +2993,7 @@ public void testSubobjectsFalseDocsWithInnerObjectMappedAsFieldThatCanParseNativ assertNotNull(location); assertNull(parsedDocument.rootDoc().getField("metrics.service.location.lat")); assertNull(parsedDocument.rootDoc().getField("metrics.service.location.lon")); - assertTrue(location instanceof LatLonPoint); + assertTrue(location instanceof GeoPointFieldMapper.LatLonPointWithDocValues); Mapper locationMapper = mapper.mappers().getMapper("metrics.service.location"); assertNotNull(locationMapper); assertTrue(locationMapper instanceof GeoPointFieldMapper); @@ -3108,7 +3098,10 @@ public void testSubobjectsFalseDocsWithInnerObjectThatCanBeParsedNatively() thro """)); assertNull(parsedDocument.rootDoc().getField("metrics.service.location.lat")); assertNull(parsedDocument.rootDoc().getField("metrics.service.location.lon")); - assertThat(parsedDocument.rootDoc().getField("metrics.service.location"), instanceOf(LatLonPoint.class)); + assertThat( + parsedDocument.rootDoc().getField("metrics.service.location"), + instanceOf(GeoPointFieldMapper.LatLonPointWithDocValues.class) + ); assertThat(mapper.mappers().getMapper("metrics.service.location"), instanceOf(GeoPointFieldMapper.class)); } @@ -3222,7 +3215,7 @@ public void testSubobjectsFalseDocsWithGeoPointFromDynamicTemplate() throws Exce } """)); - assertThat(parsedDocument.rootDoc().getField("location"), 
instanceOf(LatLonPoint.class)); + assertThat(parsedDocument.rootDoc().getField("location"), instanceOf(GeoPointFieldMapper.LatLonPointWithDocValues.class)); RootObjectMapper root = parsedDocument.dynamicMappingsUpdate().getRoot(); assertEquals(1, root.mappers.size()); assertThat(root.getMapper("location"), instanceOf(GeoPointFieldMapper.class)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java index c77b434168492..78643a2d581cc 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplatesTests.java @@ -678,7 +678,7 @@ public void testTemplateWithoutMatchPredicates() throws Exception { XContentMeteringParserDecorator.NOOP ) ); - assertThat(doc.rootDoc().getFields("foo"), hasSize(2)); + assertThat(doc.rootDoc().getFields("foo"), hasSize(1)); assertThat(doc.rootDoc().getFields("bar"), hasSize(1)); } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java index 0182da8ade48a..bcbd40677a46c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/FieldNamesFieldMapperTests.java @@ -10,7 +10,9 @@ package org.elasticsearch.index.mapper; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.termvectors.TermVectorsService; +import org.elasticsearch.test.index.IndexVersionUtils; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; @@ -78,4 +80,39 @@ public void testUsingEnabledSettingThrows() { ex.getMessage() ); } + + /** + * disabling the _field_names should still work for indices before 8.0 + */ + public 
void testUsingEnabledBefore8() throws Exception { + DocumentMapper docMapper = createDocumentMapper( + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), + topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject()) + ); + + assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); + FieldNamesFieldMapper fieldNamesMapper = docMapper.metadataMapper(FieldNamesFieldMapper.class); + assertFalse(fieldNamesMapper.fieldType().isEnabled()); + + ParsedDocument doc = docMapper.parse(source(b -> b.field("field", "value"))); + assertNull(doc.rootDoc().get("_field_names")); + } + + /** + * Merging the "_field_names" enabled setting is forbidden in 8.0, but we still want to tests the behavior on pre-8 indices + */ + public void testMergingMappingsBefore8() throws Exception { + MapperService mapperService = createMapperService( + IndexVersionUtils.randomPreviousCompatibleVersion(random(), IndexVersions.V_8_0_0), + mapping(b -> {}) + ); + + merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", false).endObject())); + assertFalse(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); + assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); + + merge(mapperService, topMapping(b -> b.startObject("_field_names").field("enabled", true).endObject())); + assertTrue(mapperService.documentMapper().metadataMapper(FieldNamesFieldMapper.class).fieldType().isEnabled()); + assertWarnings(FieldNamesFieldMapper.ENABLED_DEPRECATION_MESSAGE); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java index ec904e5af2d20..3a53440403c95 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldMapperTests.java @@ -11,7 
+11,6 @@ import org.apache.lucene.document.LatLonDocValuesField; import org.apache.lucene.document.LatLonPoint; import org.apache.lucene.geo.GeoEncodingUtils; -import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; import org.elasticsearch.cluster.metadata.IndexMetadata; @@ -194,8 +193,8 @@ public void testMetricAndMultiValues() throws Exception { new Object[] { new Double[] { pointA.getX(), pointA.getY() }, new Double[] { pointB.getX(), pointB.getY() } }, new Object[] { pointA.getY() + "," + pointA.getX(), pointB.getY() + "," + pointB.getX() }, new Object[] { GeoJson.toMap(pointA), GeoJson.toMap(pointB) } }; - IndexableField expectedPointA = new LatLonPoint("field", pointA.getY(), pointA.getX()); - IndexableField expectedPointB = new LatLonPoint("field", pointB.getY(), pointB.getX()); + IndexableField expectedPointA = new GeoPointFieldMapper.LatLonPointWithDocValues("field", pointA.getY(), pointA.getX()); + IndexableField expectedPointB = new GeoPointFieldMapper.LatLonPointWithDocValues("field", pointB.getY(), pointB.getX()); // Verify that metric and non-metric mappers behave the same on single valued fields for (Object[] values : data) { @@ -203,7 +202,7 @@ public void testMetricAndMultiValues() throws Exception { ParsedDocument doc = mapper.parse(source(b -> b.field("field", values[0]))); assertThat(doc.rootDoc().getField("field"), notNullValue()); IndexableField field = doc.rootDoc().getField("field"); - assertThat(field, instanceOf(LatLonPoint.class)); + assertThat(field, instanceOf(GeoPointFieldMapper.LatLonPointWithDocValues.class)); assertThat(field.toString(), equalTo(expectedPointA.toString())); } } @@ -214,15 +213,11 @@ public void testMetricAndMultiValues() throws Exception { { ParsedDocument doc = nonMetricMapper.parse(source(b -> b.field("field", values))); assertThat(doc.rootDoc().getField("field"), notNullValue()); - Object[] fields = doc.rootDoc() - .getFields() - 
.stream() - .filter(f -> f.name().equals("field") && f.fieldType().docValuesType() == DocValuesType.NONE) - .toArray(); + Object[] fields = doc.rootDoc().getFields().stream().filter(f -> f.name().equals("field")).toArray(); assertThat(fields.length, equalTo(2)); - assertThat(fields[0], instanceOf(LatLonPoint.class)); + assertThat(fields[0], instanceOf(GeoPointFieldMapper.LatLonPointWithDocValues.class)); assertThat(fields[0].toString(), equalTo(expectedPointA.toString())); - assertThat(fields[1], instanceOf(LatLonPoint.class)); + assertThat(fields[1], instanceOf(GeoPointFieldMapper.LatLonPointWithDocValues.class)); assertThat(fields[1].toString(), equalTo(expectedPointB.toString())); } // Metric mapper rejects multi-valued data @@ -328,7 +323,7 @@ public void testLonLatArrayDynamic() throws Exception { public void testLonLatArrayStored() throws Exception { DocumentMapper mapper = createDocumentMapper(fieldMapping(b -> b.field("type", "geo_point").field("store", true))); ParsedDocument doc = mapper.parse(source(b -> b.startArray("field").value(1.3).value(1.2).endArray())); - assertThat(doc.rootDoc().getFields("field"), hasSize(3)); + assertThat(doc.rootDoc().getFields("field"), hasSize(2)); } public void testLonLatArrayArrayStored() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptMapperTests.java index 03ace413e266d..da9fb85d6b820 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointScriptMapperTests.java @@ -74,11 +74,9 @@ protected GeoPointFieldScript.Factory multipleValuesScript() { @Override protected void assertMultipleValues(List fields) { - assertEquals(4, fields.size()); - assertEquals("LatLonPoint ", fields.get(0).toString()); - assertEquals("LatLonDocValuesField ", fields.get(1).toString()); - 
assertEquals("LatLonPoint ", fields.get(2).toString()); - assertEquals("LatLonDocValuesField ", fields.get(3).toString()); + assertEquals(2, fields.size()); + assertEquals("LatLonPointWithDocValues ", fields.get(0).toString()); + assertEquals("LatLonPointWithDocValues ", fields.get(1).toString()); } @Override diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IntervalThrottlerTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IntervalThrottlerTests.java index 25fd614524441..399c97260ceb4 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IntervalThrottlerTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IntervalThrottlerTests.java @@ -14,12 +14,12 @@ public class IntervalThrottlerTests extends ESTestCase { public void testThrottling() throws Exception { - var throttler = new IntervalThrottler.Acceptor(10); + var throttler = new IntervalThrottler.Acceptor(100); assertTrue(throttler.accept()); assertFalse(throttler.accept()); assertFalse(throttler.accept()); - Thread.sleep(20); + Thread.sleep(110); assertTrue(throttler.accept()); assertFalse(throttler.accept()); assertFalse(throttler.accept()); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java index 911fe6d4b9337..8582f26349ef2 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ObjectMapperTests.java @@ -329,6 +329,40 @@ public void testDisallowFieldReplacementForIndexTemplates() throws IOException { assertThat(e.getMessage(), containsString("can't merge a non object mapping [object.field1] with an object mapping")); } + public void testFieldReplacementSubobjectsFalse() throws IOException { + MapperService mapperService = createMapperService(mapping(b -> { + b.startObject("obj").field("type", "object").field("subobjects", 
false).startObject("properties"); + { + b.startObject("my.field").field("type", "keyword").endObject(); + } + b.endObject().endObject(); + })); + DocumentMapper mapper = mapperService.documentMapper(); + assertNull(mapper.mapping().getRoot().dynamic()); + Mapping mergeWith = mapperService.parseMapping( + "_doc", + MergeReason.INDEX_TEMPLATE, + new CompressedXContent(BytesReference.bytes(topMapping(b -> { + b.startObject("properties").startObject("obj").field("type", "object").field("subobjects", false).startObject("properties"); + { + b.startObject("my.field").field("type", "long").endObject(); + } + b.endObject().endObject().endObject(); + }))) + ); + + // Fails on mapping update. + IllegalArgumentException exception = expectThrows( + IllegalArgumentException.class, + () -> mapper.mapping().merge(mergeWith, MergeReason.MAPPING_UPDATE, Long.MAX_VALUE) + ); + assertEquals("mapper [obj.my.field] cannot be changed from type [keyword] to [long]", exception.getMessage()); + + // Passes on template merging. 
+ Mapping merged = mapper.mapping().merge(mergeWith, MergeReason.INDEX_TEMPLATE, Long.MAX_VALUE); + assertThat(((ObjectMapper) merged.getRoot().getMapper("obj")).getMapper("my.field"), instanceOf(NumberFieldMapper.class)); + } + public void testUnknownLegacyFields() throws Exception { MapperService service = createMapperService(IndexVersion.fromId(5000099), Settings.EMPTY, () -> false, mapping(b -> { b.startObject("name"); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 0bbf63b07c4cb..7d436ab5d8d22 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -790,21 +790,6 @@ public void onFailure(final Exception e) { } }, TimeValue.timeValueSeconds(30)); latch.await(); - - // It's possible to acquire permits if we skip the primary mode check - var permitAcquiredLatch = new CountDownLatch(1); - indexShard.acquirePrimaryOperationPermit(ActionListener.wrap(r -> { - r.close(); - permitAcquiredLatch.countDown(); - }, Assert::assertNotNull), EsExecutors.DIRECT_EXECUTOR_SERVICE, false, IndexShard.PrimaryPermitCheck.NONE); - safeAwait(permitAcquiredLatch); - - var allPermitsAcquiredLatch = new CountDownLatch(1); - indexShard.acquireAllPrimaryOperationsPermits(ActionListener.wrap(r -> { - r.close(); - allPermitsAcquiredLatch.countDown(); - }, Assert::assertNotNull), TimeValue.timeValueSeconds(30), IndexShard.PrimaryPermitCheck.NONE); - safeAwait(allPermitsAcquiredLatch); } if (Assertions.ENABLED && indexShard.routingEntry().isRelocationTarget() == false) { diff --git a/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTestUtils.java b/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTestUtils.java index ed78baeb9abe6..7e68f41de1b2e 100644 --- 
a/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTestUtils.java +++ b/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTestUtils.java @@ -11,6 +11,8 @@ import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; +import java.util.EnumSet; + import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomInt; @@ -18,7 +20,9 @@ public class SettingsConfigurationTestUtils { public static SettingsConfiguration getRandomSettingsConfigurationField() { - return new SettingsConfiguration.Builder().setDefaultValue(randomAlphaOfLength(10)) + return new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.SPARSE_EMBEDDING)).setDefaultValue( + randomAlphaOfLength(10) + ) .setDescription(randomAlphaOfLength(10)) .setLabel(randomAlphaOfLength(10)) .setRequired(randomBoolean()) diff --git a/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTests.java b/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTests.java index 551a25fe52f18..2b286cf86a1c8 100644 --- a/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTests.java +++ b/server/src/test/java/org/elasticsearch/inference/SettingsConfigurationTests.java @@ -34,7 +34,8 @@ public void testToXContent() throws IOException { "required": true, "sensitive": false, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion", "sparse_embedding", "rerank"] } """); @@ -56,7 +57,8 @@ public void testToXContent_WithNumericSelectOptions() throws IOException { "required": true, "sensitive": false, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] } """); @@ -74,7 +76,8 @@ public void testToXContentCrawlerConfig_WithNullValue() throws IOException { String content = 
XContentHelper.stripWhitespace(""" { "label": "nextSyncConfig", - "value": null + "value": null, + "supported_task_types": ["text_embedding", "completion", "sparse_embedding", "rerank"] } """); diff --git a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java index b1cf40d7f22ec..7cb12c1b316e8 100644 --- a/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/reservedstate/service/FileSettingsServiceTests.java @@ -214,6 +214,9 @@ public void testInitialFileError() throws Exception { // wait until the watcher thread has started, and it has discovered the file assertTrue(latch.await(20, TimeUnit.SECONDS)); + // Note: the name "processFileOnServiceStart" is a bit misleading because it is not + // referring to fileSettingsService.start(). Rather, it is referring to the initialization + // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
verify(fileSettingsService, times(1)).processFileOnServiceStart(); verify(controller, times(1)).process(any(), any(XContentParser.class), eq(ReservedStateVersionCheck.HIGHER_OR_SAME_VERSION), any()); // assert we never notified any listeners of successful application of file based settings @@ -312,7 +315,7 @@ public void testInvalidJSON() throws Exception { } finally { awaitOrBust(fileChangeBarrier); } - }).when(fileSettingsService).processFileChanges(); + }).when(fileSettingsService).onProcessFileChangesException(any()); writeTestFile(fileSettingsService.watchedFile(), "test_invalid_JSON"); awaitOrBust(fileChangeBarrier); @@ -326,10 +329,6 @@ public void testInvalidJSON() throws Exception { argThat(e -> unwrapException(e) instanceof XContentParseException) ); - // Note: the name "processFileOnServiceStart" is a bit misleading because it is not - // referring to fileSettingsService.start(). Rather, it is referring to the initialization - // of the watcher thread itself, which occurs asynchronously when clusterChanged is first called. 
- assertEquals(YELLOW, healthIndicatorService.calculate(false, null).status()); verify(healthIndicatorService, Mockito.atLeast(1)).failureOccurred(contains(XContentParseException.class.getName())); } diff --git a/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java b/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java index 1060654da14da..0213ee9046462 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/RestoreServiceTests.java @@ -76,7 +76,7 @@ public void testUpdateDataStream() { assertEquals(dataStreamName, updateDataStream.getName()); assertEquals(List.of(updatedBackingIndex), updateDataStream.getIndices()); - assertEquals(List.of(updatedFailureIndex), updateDataStream.getFailureIndices().getIndices()); + assertEquals(List.of(updatedFailureIndex), updateDataStream.getFailureIndices()); } public void testUpdateDataStreamRename() { @@ -112,7 +112,7 @@ public void testUpdateDataStreamRename() { assertEquals(renamedDataStreamName, renamedDataStream.getName()); assertEquals(List.of(renamedBackingIndex), renamedDataStream.getIndices()); - assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices().getIndices()); + assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices()); } public void testPrefixNotChanged() { @@ -147,7 +147,7 @@ public void testPrefixNotChanged() { assertEquals(renamedDataStreamName, renamedDataStream.getName()); assertEquals(List.of(renamedIndex), renamedDataStream.getIndices()); - assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices().getIndices()); + assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices()); request = new RestoreSnapshotRequest(TEST_REQUEST_TIMEOUT).renamePattern("ds-000001").renameReplacement("ds2-000001"); @@ -155,7 +155,7 @@ public void testPrefixNotChanged() { assertEquals(renamedDataStreamName, 
renamedDataStream.getName()); assertEquals(List.of(renamedIndex), renamedDataStream.getIndices()); - assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices().getIndices()); + assertEquals(List.of(renamedFailureIndex), renamedDataStream.getFailureIndices()); } public void testRefreshRepositoryUuidsDoesNothingIfDisabled() { diff --git a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java index 2eceaa8e421e4..af430b2d18c51 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportHandshakerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.threadpool.TestThreadPool; import java.io.IOException; @@ -51,7 +52,7 @@ public void setUp() throws Exception { .address("host", "host_address", buildNewFakeTransportAddress()) .roles(Collections.emptySet()) .build(); - threadPool = new TestThreadPool("thread-poll"); + threadPool = new TestThreadPool(getTestName()); handshaker = new TransportHandshaker(TransportVersion.current(), threadPool, requestSender, false); } @@ -85,6 +86,37 @@ public void testHandshakeRequestAndResponse() throws IOException { assertEquals(TransportVersion.current(), versionFuture.actionGet()); } + public void testHandshakeResponseFromOlderNode() throws Exception { + final PlainActionFuture versionFuture = new PlainActionFuture<>(); + final long reqId = randomNonNegativeLong(); + handshaker.sendHandshake(reqId, node, channel, SAFE_AWAIT_TIMEOUT, versionFuture); + TransportResponseHandler handler = handshaker.removeHandlerForHandshake(reqId); + + assertFalse(versionFuture.isDone()); + + final var remoteVersion = 
TransportVersionUtils.randomCompatibleVersion(random()); + handler.handleResponse(new TransportHandshaker.HandshakeResponse(remoteVersion)); + + assertTrue(versionFuture.isDone()); + assertEquals(remoteVersion, versionFuture.result()); + } + + public void testHandshakeResponseFromNewerNode() throws Exception { + final PlainActionFuture versionFuture = new PlainActionFuture<>(); + final long reqId = randomNonNegativeLong(); + handshaker.sendHandshake(reqId, node, channel, SAFE_AWAIT_TIMEOUT, versionFuture); + TransportResponseHandler handler = handshaker.removeHandlerForHandshake(reqId); + + assertFalse(versionFuture.isDone()); + + handler.handleResponse( + new TransportHandshaker.HandshakeResponse(TransportVersion.fromId(TransportVersion.current().id() + between(0, 10))) + ); + + assertTrue(versionFuture.isDone()); + assertEquals(TransportVersion.current(), versionFuture.result()); + } + public void testHandshakeRequestFutureVersionsCompatibility() throws IOException { long reqId = randomLongBetween(1, 10); handshaker.sendHandshake(reqId, node, channel, new TimeValue(30, TimeUnit.SECONDS), new PlainActionFuture<>()); diff --git a/settings.gradle b/settings.gradle index df428139c92a0..8a15f74dcb286 100644 --- a/settings.gradle +++ b/settings.gradle @@ -89,6 +89,7 @@ List projects = [ 'distribution:tools:ansi-console', 'server', 'test:framework', + 'test:fixtures:aws-ec2-fixture', 'test:fixtures:aws-fixture-utils', 'test:fixtures:aws-sts-fixture', 'test:fixtures:azure-fixture', diff --git a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java index 958132b3e4076..db882ee02efc7 100644 --- a/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java +++ 
b/test/external-modules/esql-heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/heap_attack/HeapAttackIT.java @@ -23,6 +23,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.IndexMode; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.test.ListMatcher; import org.elasticsearch.test.MapMatcher; import org.elasticsearch.test.cluster.ElasticsearchCluster; @@ -42,6 +45,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.IntFunction; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -57,8 +61,8 @@ import static org.hamcrest.Matchers.matchesRegex; /** - * Tests that run ESQL queries that have, in the past, used so much memory they - * crash Elasticsearch. + * Tests that run ESQL queries that use a ton of memory. We want to make + * sure they don't consume the entire heap and crash Elasticsearch. 
*/ public class HeapAttackIT extends ESRestTestCase { @ClassRule @@ -624,6 +628,49 @@ private Response fetchMvLongs() throws IOException { return query(query.toString(), "columns"); } + public void testLookupExplosion() throws IOException { + int sensorDataCount = 7500; + int lookupEntries = 10000; + Map map = responseAsMap(lookupExplosion(sensorDataCount, lookupEntries)); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount * lookupEntries)))); + } + + public void testLookupExplosionManyMatches() throws IOException { + assertCircuitBreaks(() -> lookupExplosion(8500, 10000)); + } + + private Response lookupExplosion(int sensorDataCount, int lookupEntries) throws IOException { + initSensorData(sensorDataCount, 1); + initSensorLookup(lookupEntries, 1, i -> "73.9857 40.7484"); + StringBuilder query = startQuery(); + query.append("FROM sensor_data | LOOKUP JOIN sensor_lookup ON id | STATS COUNT(*)\"}"); + return query(query.toString(), null); + } + + public void testEnrichExplosion() throws IOException { + int sensorDataCount = 1000; + int lookupEntries = 100; + Map map = responseAsMap(enrichExplosion(sensorDataCount, lookupEntries)); + assertMap(map, matchesMap().extraOk().entry("values", List.of(List.of(sensorDataCount)))); + } + + public void testEnrichExplosionManyMatches() throws IOException { + assertCircuitBreaks(() -> enrichExplosion(1000, 10000)); + } + + private Response enrichExplosion(int sensorDataCount, int lookupEntries) throws IOException { + initSensorData(sensorDataCount, 1); + initSensorEnrich(lookupEntries, 1, i -> "73.9857 40.7484"); + try { + StringBuilder query = startQuery(); + query.append("FROM sensor_data | ENRICH sensor ON id | STATS COUNT(*)\"}"); + return query(query.toString(), null); + } finally { + Request delete = new Request("DELETE", "/_enrich/policy/sensor"); + assertMap(responseAsMap(client().performRequest(delete)), matchesMap().entry("acknowledged", true)); + } + } + private void initManyLongs() 
throws IOException { logger.info("loading many documents with longs"); StringBuilder bulk = new StringBuilder(); @@ -647,7 +694,7 @@ private void initManyLongs() throws IOException { } private void initSingleDocIndex() throws IOException { - logger.info("loading many documents with a single document"); + logger.info("loading a single document"); initIndex("single", """ {"create":{}} {"a":1} @@ -730,6 +777,77 @@ private void initMvLongsIndex(int docs, int fields, int fieldValues) throws IOEx initIndex("mv_longs", bulk.toString()); } + private void initSensorData(int docCount, int sensorCount) throws IOException { + logger.info("loading sensor data"); + createIndex("sensor_data", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """ + { + "properties": { + "@timestamp": { "type": "date" }, + "id": { "type": "long" }, + "value": { "type": "double" } + } + }"""); + int docsPerBulk = 1000; + long firstDate = DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parseMillis("2025-01-01T00:00:00Z"); + + StringBuilder data = new StringBuilder(); + for (int i = 0; i < docCount; i++) { + data.append(String.format(Locale.ROOT, """ + {"create":{}} + {"timestamp":"%s", "id": %d, "value": %f} + """, DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.formatMillis(i * 10L + firstDate), i % sensorCount, i * 1.1)); + if (i % docsPerBulk == docsPerBulk - 1) { + bulk("sensor_data", data.toString()); + data.setLength(0); + } + } + initIndex("sensor_data", data.toString()); + } + + private void initSensorLookup(int lookupEntries, int sensorCount, IntFunction location) throws IOException { + logger.info("loading sensor lookup"); + createIndex("sensor_lookup", Settings.builder().put(IndexSettings.MODE.getKey(), IndexMode.LOOKUP.getName()).build(), """ + { + "properties": { + "id": { "type": "long" }, + "location": { "type": "geo_point" } + } + }"""); + int docsPerBulk = 1000; + StringBuilder data = new StringBuilder(); + for (int i = 0; i < lookupEntries; i++) { + 
int sensor = i % sensorCount; + data.append(String.format(Locale.ROOT, """ + {"create":{}} + {"id": %d, "location": "POINT(%s)"} + """, sensor, location.apply(sensor))); + if (i % docsPerBulk == docsPerBulk - 1) { + bulk("sensor_lookup", data.toString()); + data.setLength(0); + } + } + initIndex("sensor_lookup", data.toString()); + } + + private void initSensorEnrich(int lookupEntries, int sensorCount, IntFunction location) throws IOException { + initSensorLookup(lookupEntries, sensorCount, location); + logger.info("loading sensor enrich"); + + Request create = new Request("PUT", "/_enrich/policy/sensor"); + create.setJsonEntity(""" + { + "match": { + "indices": "sensor_lookup", + "match_field": "id", + "enrich_fields": ["location"] + } + } + """); + assertMap(responseAsMap(client().performRequest(create)), matchesMap().entry("acknowledged", true)); + Request execute = new Request("POST", "/_enrich/policy/sensor/_execute"); + assertMap(responseAsMap(client().performRequest(execute)), matchesMap().entry("status", Map.of("phase", "COMPLETE"))); + } + private void bulk(String name, String bulk) throws IOException { Request request = new Request("POST", "/" + name + "/_bulk"); request.setJsonEntity(bulk); diff --git a/test/fixtures/aws-ec2-fixture/build.gradle b/test/fixtures/aws-ec2-fixture/build.gradle new file mode 100644 index 0000000000000..43898cfd299c1 --- /dev/null +++ b/test/fixtures/aws-ec2-fixture/build.gradle @@ -0,0 +1,20 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +apply plugin: 'elasticsearch.java' + +description = 'Fixture for emulating the EC2 DescribeInstances API running in AWS' + +dependencies { + api project(':server') + api("junit:junit:${versions.junit}") { + transitive = false + } + api project(':test:framework') + api project(':test:fixtures:aws-fixture-utils') +} diff --git a/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpFixture.java b/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpFixture.java new file mode 100644 index 0000000000000..ad4f48939009c --- /dev/null +++ b/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpFixture.java @@ -0,0 +1,61 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ +package fixture.aws.ec2; + +import com.sun.net.httpserver.HttpServer; + +import org.junit.rules.ExternalResource; + +import java.net.InetAddress; +import java.net.InetSocketAddress; +import java.net.UnknownHostException; +import java.util.List; +import java.util.Objects; +import java.util.function.BiPredicate; +import java.util.function.Supplier; + +public class AwsEc2HttpFixture extends ExternalResource { + + private HttpServer server; + + private final Supplier> transportAddressesSupplier; + private final BiPredicate authorizationPredicate; + + public AwsEc2HttpFixture(BiPredicate authorizationPredicate, Supplier> transportAddressesSupplier) { + this.authorizationPredicate = Objects.requireNonNull(authorizationPredicate); + this.transportAddressesSupplier = Objects.requireNonNull(transportAddressesSupplier); + } + + public String getAddress() { + return "http://" + server.getAddress().getHostString() + ":" + server.getAddress().getPort(); + } + + public void stop(int delay) { + server.stop(delay); + } + + protected void before() throws Throwable { + server = HttpServer.create(resolveAddress(), 0); + server.createContext("/", new AwsEc2HttpHandler(authorizationPredicate, transportAddressesSupplier)); + server.start(); + } + + @Override + protected void after() { + stop(0); + } + + private static InetSocketAddress resolveAddress() { + try { + return new InetSocketAddress(InetAddress.getByName("localhost"), 0); + } catch (UnknownHostException e) { + throw new RuntimeException(e); + } + } +} diff --git a/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpHandler.java b/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpHandler.java new file mode 100644 index 0000000000000..5752762325dd0 --- /dev/null +++ b/test/fixtures/aws-ec2-fixture/src/main/java/fixture/aws/ec2/AwsEc2HttpHandler.java @@ -0,0 +1,189 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ +package fixture.aws.ec2; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; + +import org.apache.http.client.utils.URLEncodedUtils; +import org.elasticsearch.ExceptionsHelper; +import org.elasticsearch.core.SuppressForbidden; +import org.elasticsearch.rest.RestStatus; + +import java.io.IOException; +import java.io.StringWriter; +import java.util.HashMap; +import java.util.List; +import java.util.Objects; +import java.util.function.BiPredicate; +import java.util.function.Supplier; + +import javax.xml.XMLConstants; +import javax.xml.stream.XMLOutputFactory; +import javax.xml.stream.XMLStreamWriter; + +import static fixture.aws.AwsCredentialsUtils.checkAuthorization; +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.elasticsearch.test.ESTestCase.randomIdentifier; +import static org.junit.Assert.assertNull; + +/** + * Minimal HTTP handler that emulates the AWS EC2 endpoint (at least, just the DescribeInstances action therein) + */ +@SuppressForbidden(reason = "this test uses a HttpServer to emulate the AWS EC2 endpoint") +public class AwsEc2HttpHandler implements HttpHandler { + + private final BiPredicate authorizationPredicate; + private final Supplier> transportAddressesSupplier; + + public AwsEc2HttpHandler(BiPredicate authorizationPredicate, Supplier> transportAddressesSupplier) { + this.authorizationPredicate = Objects.requireNonNull(authorizationPredicate); + this.transportAddressesSupplier = Objects.requireNonNull(transportAddressesSupplier); + } + + @Override + public void handle(final HttpExchange 
exchange) throws IOException { + try (exchange) { + + if ("POST".equals(exchange.getRequestMethod()) && "/".equals(exchange.getRequestURI().getPath())) { + + if (checkAuthorization(authorizationPredicate, exchange) == false) { + return; + } + + final var parsedRequest = new HashMap(); + for (final var nameValuePair : URLEncodedUtils.parse(new String(exchange.getRequestBody().readAllBytes(), UTF_8), UTF_8)) { + assertNull(nameValuePair.getName(), parsedRequest.put(nameValuePair.getName(), nameValuePair.getValue())); + } + + if ("DescribeInstances".equals(parsedRequest.get("Action")) == false) { + throw new UnsupportedOperationException(parsedRequest.toString()); + } + + final var responseBody = generateDescribeInstancesResponse(); + exchange.getResponseHeaders().add("Content-Type", "text/xml; charset=UTF-8"); + exchange.sendResponseHeaders(RestStatus.OK.getStatus(), responseBody.length); + exchange.getResponseBody().write(responseBody); + return; + } + + throw new UnsupportedOperationException("can only handle DescribeInstances requests"); + + } catch (Exception e) { + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError(e)); + } + } + + private static final String XML_NAMESPACE = "http://ec2.amazonaws.com/doc/2013-02-01/"; + + private byte[] generateDescribeInstancesResponse() { + final XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory(); + xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true); + + final StringWriter out = new StringWriter(); + XMLStreamWriter sw; + try { + sw = xmlOutputFactory.createXMLStreamWriter(out); + sw.writeStartDocument(); + + sw.setDefaultNamespace(XML_NAMESPACE); + sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "DescribeInstancesResponse", XML_NAMESPACE); + { + sw.writeStartElement("requestId"); + sw.writeCharacters(randomIdentifier()); + sw.writeEndElement(); + + sw.writeStartElement("reservationSet"); + { + for (final var address : transportAddressesSupplier.get()) { + + 
sw.writeStartElement("item"); + { + sw.writeStartElement("reservationId"); + sw.writeCharacters(randomIdentifier()); + sw.writeEndElement(); + + sw.writeStartElement("instancesSet"); + { + sw.writeStartElement("item"); + { + sw.writeStartElement("instanceId"); + sw.writeCharacters(randomIdentifier()); + sw.writeEndElement(); + + sw.writeStartElement("imageId"); + sw.writeCharacters(randomIdentifier()); + sw.writeEndElement(); + + sw.writeStartElement("instanceState"); + { + sw.writeStartElement("code"); + sw.writeCharacters("16"); + sw.writeEndElement(); + + sw.writeStartElement("name"); + sw.writeCharacters("running"); + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeStartElement("privateDnsName"); + sw.writeCharacters(address); + sw.writeEndElement(); + + sw.writeStartElement("dnsName"); + sw.writeCharacters(address); + sw.writeEndElement(); + + sw.writeStartElement("instanceType"); + sw.writeCharacters("m1.medium"); // TODO randomize + sw.writeEndElement(); + + sw.writeStartElement("placement"); + { + sw.writeStartElement("availabilityZone"); + sw.writeCharacters(randomIdentifier()); + sw.writeEndElement(); + + sw.writeEmptyElement("groupName"); + + sw.writeStartElement("tenancy"); + sw.writeCharacters("default"); + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeStartElement("privateIpAddress"); + sw.writeCharacters(address); + sw.writeEndElement(); + + sw.writeStartElement("ipAddress"); + sw.writeCharacters(address); + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + } + sw.writeEndElement(); + + sw.writeEndDocument(); + sw.flush(); + } + } catch (Exception e) { + ExceptionsHelper.maybeDieOnAnotherThread(new AssertionError(e)); + throw new RuntimeException(e); + } + return out.toString().getBytes(UTF_8); + } +} diff --git a/test/fixtures/aws-ec2-fixture/src/test/java/fixture/aws/ec2/AwsEc2HttpHandlerTests.java 
b/test/fixtures/aws-ec2-fixture/src/test/java/fixture/aws/ec2/AwsEc2HttpHandlerTests.java new file mode 100644 index 0000000000000..08a2dd85de254 --- /dev/null +++ b/test/fixtures/aws-ec2-fixture/src/test/java/fixture/aws/ec2/AwsEc2HttpHandlerTests.java @@ -0,0 +1,211 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package fixture.aws.ec2; + +import com.sun.net.httpserver.Headers; +import com.sun.net.httpserver.HttpContext; +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpPrincipal; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.BytesStreamOutput; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.net.URI; +import java.util.HashSet; +import java.util.List; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import javax.xml.namespace.QName; +import javax.xml.stream.XMLInputFactory; +import javax.xml.stream.XMLStreamConstants; +import javax.xml.stream.XMLStreamException; + +public class AwsEc2HttpHandlerTests extends ESTestCase { + + public void testDescribeInstances() throws IOException, XMLStreamException { + final List addresses = randomList( + 1, + 10, + () -> "10.0." + between(1, 254) + "." 
+ between(1, 254) + ":" + between(1025, 65535) + ); + + final var handler = new AwsEc2HttpHandler((ignored1, ignored2) -> true, () -> addresses); + + final var response = handleRequest(handler); + assertEquals(RestStatus.OK, response.status()); + + final var unseenAddressesInTags = Stream.of("privateDnsName", "dnsName", "privateIpAddress", "ipAddress") + .collect( + Collectors.toMap( + localName -> new QName("http://ec2.amazonaws.com/doc/2013-02-01/", localName), + localName -> new HashSet<>(addresses) + ) + ); + + final var xmlStreamReader = XMLInputFactory.newDefaultFactory().createXMLStreamReader(response.body().streamInput()); + try { + for (; xmlStreamReader.getEventType() != XMLStreamConstants.END_DOCUMENT; xmlStreamReader.next()) { + if (xmlStreamReader.getEventType() == XMLStreamConstants.START_ELEMENT) { + final var unseenAddresses = unseenAddressesInTags.get(xmlStreamReader.getName()); + if (unseenAddresses != null) { + xmlStreamReader.next(); + assertEquals(XMLStreamConstants.CHARACTERS, xmlStreamReader.getEventType()); + final var currentAddress = xmlStreamReader.getText(); + assertTrue(currentAddress, unseenAddresses.remove(currentAddress)); + } + } + } + } finally { + xmlStreamReader.close(); + } + + assertTrue(unseenAddressesInTags.toString(), unseenAddressesInTags.values().stream().allMatch(HashSet::isEmpty)); + } + + private record TestHttpResponse(RestStatus status, BytesReference body) {} + + private static TestHttpResponse handleRequest(AwsEc2HttpHandler handler) { + final var httpExchange = new TestHttpExchange( + "POST", + "/", + new BytesArray("Action=DescribeInstances"), + TestHttpExchange.EMPTY_HEADERS + ); + try { + handler.handle(httpExchange); + } catch (IOException e) { + fail(e); + } + assertNotEquals(0, httpExchange.getResponseCode()); + return new TestHttpResponse(RestStatus.fromCode(httpExchange.getResponseCode()), httpExchange.getResponseBodyContents()); + } + + private static class TestHttpExchange extends HttpExchange { + + 
private static final Headers EMPTY_HEADERS = new Headers(); + + private final String method; + private final URI uri; + private final BytesReference requestBody; + private final Headers requestHeaders; + + private final Headers responseHeaders = new Headers(); + private final BytesStreamOutput responseBody = new BytesStreamOutput(); + private int responseCode; + + TestHttpExchange(String method, String uri, BytesReference requestBody, Headers requestHeaders) { + this.method = method; + this.uri = URI.create(uri); + this.requestBody = requestBody; + this.requestHeaders = requestHeaders; + } + + @Override + public Headers getRequestHeaders() { + return requestHeaders; + } + + @Override + public Headers getResponseHeaders() { + return responseHeaders; + } + + @Override + public URI getRequestURI() { + return uri; + } + + @Override + public String getRequestMethod() { + return method; + } + + @Override + public HttpContext getHttpContext() { + return null; + } + + @Override + public void close() {} + + @Override + public InputStream getRequestBody() { + try { + return requestBody.streamInput(); + } catch (IOException e) { + throw new AssertionError(e); + } + } + + @Override + public OutputStream getResponseBody() { + return responseBody; + } + + @Override + public void sendResponseHeaders(int rCode, long responseLength) { + this.responseCode = rCode; + } + + @Override + public InetSocketAddress getRemoteAddress() { + return null; + } + + @Override + public int getResponseCode() { + return responseCode; + } + + public BytesReference getResponseBodyContents() { + return responseBody.bytes(); + } + + @Override + public InetSocketAddress getLocalAddress() { + return null; + } + + @Override + public String getProtocol() { + return "HTTP/1.1"; + } + + @Override + public Object getAttribute(String name) { + return null; + } + + @Override + public void setAttribute(String name, Object value) { + fail("setAttribute not implemented"); + } + + @Override + public void 
setStreams(InputStream i, OutputStream o) { + fail("setStreams not implemented"); + } + + @Override + public HttpPrincipal getPrincipal() { + fail("getPrincipal not implemented"); + throw new UnsupportedOperationException("getPrincipal not implemented"); + } + } + +} diff --git a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/ResponseInjectingAzureHttpHandler.java b/test/framework/src/main/java/org/elasticsearch/http/ResponseInjectingHttpHandler.java similarity index 57% rename from modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/ResponseInjectingAzureHttpHandler.java rename to test/framework/src/main/java/org/elasticsearch/http/ResponseInjectingHttpHandler.java index 108d8bc286972..d626401755eb2 100644 --- a/modules/repository-azure/src/test/java/org/elasticsearch/repositories/azure/ResponseInjectingAzureHttpHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/http/ResponseInjectingHttpHandler.java @@ -7,9 +7,8 @@ * License v3.0 only", or the "Server Side Public License, v 1". 
*/ -package org.elasticsearch.repositories.azure; +package org.elasticsearch.http; -import com.sun.net.httpserver.Headers; import com.sun.net.httpserver.HttpExchange; import com.sun.net.httpserver.HttpHandler; @@ -19,21 +18,18 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.ArrayList; -import java.util.List; import java.util.Queue; import java.util.function.Predicate; -@SuppressForbidden(reason = "we use a HttpServer to emulate Azure") -class ResponseInjectingAzureHttpHandler implements ESMockAPIBasedRepositoryIntegTestCase.DelegatingHttpHandler { +@SuppressForbidden(reason = "We use HttpServer for the fixtures") +public class ResponseInjectingHttpHandler implements ESMockAPIBasedRepositoryIntegTestCase.DelegatingHttpHandler { private final HttpHandler delegate; private final Queue requestHandlerQueue; - ResponseInjectingAzureHttpHandler(Queue requestHandlerQueue, HttpHandler delegate) { + public ResponseInjectingHttpHandler(Queue requestHandlerQueue, HttpHandler delegate) { this.delegate = delegate; this.requestHandlerQueue = requestHandlerQueue; - AzureBlobContainerStatsTests test = new AzureBlobContainerStatsTests(); } @Override @@ -51,38 +47,9 @@ public HttpHandler getDelegate() { return delegate; } - /** - * Creates a {@link ResponseInjectingAzureHttpHandler.RequestHandler} that will persistently fail the first numberToFail - * distinct requests it sees. Any other requests are passed through to the delegate. 
- * - * @param numberToFail The number of requests to fail - * @return the handler - */ - static ResponseInjectingAzureHttpHandler.RequestHandler createFailNRequestsHandler(int numberToFail) { - final List requestsToFail = new ArrayList<>(numberToFail); - return (exchange, delegate) -> { - final Headers requestHeaders = exchange.getRequestHeaders(); - final String requestId = requestHeaders.get("X-ms-client-request-id").get(0); - boolean failRequest = false; - synchronized (requestsToFail) { - if (requestsToFail.contains(requestId)) { - failRequest = true; - } else if (requestsToFail.size() < numberToFail) { - requestsToFail.add(requestId); - failRequest = true; - } - } - if (failRequest) { - exchange.sendResponseHeaders(500, -1); - } else { - delegate.handle(exchange); - } - }; - } - - @SuppressForbidden(reason = "we use a HttpServer to emulate Azure") + @SuppressForbidden(reason = "We use HttpServer for the fixtures") @FunctionalInterface - interface RequestHandler { + public interface RequestHandler { void writeResponse(HttpExchange exchange, HttpHandler delegate) throws IOException; default boolean matchesRequest(HttpExchange exchange) { @@ -90,14 +57,14 @@ default boolean matchesRequest(HttpExchange exchange) { } } - @SuppressForbidden(reason = "we use a HttpServer to emulate Azure") - static class FixedRequestHandler implements RequestHandler { + @SuppressForbidden(reason = "We use HttpServer for the fixtures") + public static class FixedRequestHandler implements RequestHandler { private final RestStatus status; private final String responseBody; private final Predicate requestMatcher; - FixedRequestHandler(RestStatus status) { + public FixedRequestHandler(RestStatus status) { this(status, null, req -> true); } @@ -106,7 +73,7 @@ static class FixedRequestHandler implements RequestHandler { * that because the errors are stored in a queue this will prevent any subsequently queued errors from * being returned until after it returns. 
*/ - FixedRequestHandler(RestStatus status, String responseBody, Predicate requestMatcher) { + public FixedRequestHandler(RestStatus status, String responseBody, Predicate requestMatcher) { this.status = status; this.responseBody = responseBody; this.requestMatcher = requestMatcher; @@ -121,7 +88,7 @@ public boolean matchesRequest(HttpExchange exchange) { public void writeResponse(HttpExchange exchange, HttpHandler delegateHandler) throws IOException { if (responseBody != null) { byte[] responseBytes = responseBody.getBytes(StandardCharsets.UTF_8); - exchange.sendResponseHeaders(status.getStatus(), responseBytes.length); + exchange.sendResponseHeaders(status.getStatus(), responseBytes.length == 0 ? -1 : responseBytes.length); exchange.getResponseBody().write(responseBytes); } else { exchange.sendResponseHeaders(status.getStatus(), -1); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java index 8aa6ea4bc9e26..c7007ac60fe57 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalClusterFactory.java @@ -78,6 +78,8 @@ public abstract class AbstractLocalClusterFactory getAvailableTransportEndpoints() { + Path portsFile = workingDir.resolve("logs").resolve("transport.ports"); + if (Files.notExists(portsFile)) { + // Ok if missing, we're only returning the _available_ transport endpoints and the node might not yet be started up. + // If we're using this for discovery then we'll retry until we see enough running nodes to form the cluster. 
+ return List.of(); + } + return readPortsFile(portsFile); + } + public String getRemoteClusterServerEndpoint() { if (spec.isRemoteClusterServerEnabled()) { Path portsFile = workingDir.resolve("logs").resolve("remote_cluster.ports"); @@ -662,7 +678,7 @@ private void installPlugins() { .findFirst() .map(path -> { DefaultPluginInstallSpec installSpec = plugin.getValue(); - // Path the plugin archive with configured overrides if necessary + // Patch the plugin archive with configured overrides if necessary if (installSpec.entitlementsOverride != null || installSpec.propertiesOverride != null) { Path target; try { @@ -673,13 +689,13 @@ private void installPlugins() { ArchivePatcher patcher = new ArchivePatcher(path, target); if (installSpec.entitlementsOverride != null) { patcher.override( - "entitlement-policy.yaml", + ENTITLEMENT_POLICY_YAML, original -> installSpec.entitlementsOverride.apply(original).asStream() ); } if (installSpec.propertiesOverride != null) { patcher.override( - "plugin-descriptor.properties", + PLUGIN_DESCRIPTOR_PROPERTIES, original -> installSpec.propertiesOverride.apply(original).asStream() ); } @@ -729,11 +745,11 @@ private void installModules() { .map(Path::of) .toList(); - spec.getModules().forEach(module -> installModule(module, modulePaths)); + spec.getModules().forEach((module, spec) -> installModule(module, spec, modulePaths)); } } - private void installModule(String moduleName, List modulePaths) { + private void installModule(String moduleName, DefaultPluginInstallSpec installSpec, List modulePaths) { Path destination = distributionDir.resolve("modules").resolve(moduleName); if (Files.notExists(destination)) { Path modulePath = modulePaths.stream().filter(path -> path.endsWith(moduleName)).findFirst().orElseThrow(() -> { @@ -743,7 +759,7 @@ private void installModule(String moduleName, List modulePaths) { ? 
"project(xpackModule('" + moduleName.substring(7) + "'))" : "project(':modules:" + moduleName + "')"; - throw new RuntimeException( + return new RuntimeException( "Unable to locate module '" + moduleName + "'. Ensure you've added the following to the build script for project '" @@ -758,20 +774,34 @@ private void installModule(String moduleName, List modulePaths) { }); IOUtils.syncWithCopy(modulePath, destination); + try { + if (installSpec.entitlementsOverride != null) { + Path entitlementsFile = modulePath.resolve(ENTITLEMENT_POLICY_YAML); + String original = Files.exists(entitlementsFile) ? Files.readString(entitlementsFile) : ""; + Path target = destination.resolve(ENTITLEMENT_POLICY_YAML); + installSpec.entitlementsOverride.apply(original).writeTo(target); + } + if (installSpec.propertiesOverride != null) { + Path propertiesFiles = modulePath.resolve(PLUGIN_DESCRIPTOR_PROPERTIES); + String original = Files.exists(propertiesFiles) ? Files.readString(propertiesFiles) : ""; + Path target = destination.resolve(PLUGIN_DESCRIPTOR_PROPERTIES); + installSpec.propertiesOverride.apply(original).writeTo(target); + } + } catch (IOException e) { + throw new UncheckedIOException("Error patching module '" + moduleName + "'", e); + } - // Install any extended plugins + // Install any extended modules Properties pluginProperties = new Properties(); try ( - InputStream in = new BufferedInputStream( - new FileInputStream(modulePath.resolve("plugin-descriptor.properties").toFile()) - ) + InputStream in = new BufferedInputStream(new FileInputStream(modulePath.resolve(PLUGIN_DESCRIPTOR_PROPERTIES).toFile())) ) { pluginProperties.load(in); String extendedProperty = pluginProperties.getProperty("extended.plugins"); if (extendedProperty != null) { - String[] extendedPlugins = extendedProperty.split(","); - for (String plugin : extendedPlugins) { - installModule(plugin, modulePaths); + String[] extendedModules = extendedProperty.split(","); + for (String module : extendedModules) { + 
installModule(module, new DefaultPluginInstallSpec(), modulePaths); } } } catch (IOException e) { diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java index 1ef4bcbfb6120..9617cb633aa9b 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/AbstractLocalSpecBuilder.java @@ -34,7 +34,7 @@ public abstract class AbstractLocalSpecBuilder> im private final Map settings = new HashMap<>(); private final List environmentProviders = new ArrayList<>(); private final Map environment = new HashMap<>(); - private final Set modules = new HashSet<>(); + private final Map modules = new HashMap<>(); private final Map plugins = new HashMap<>(); private final Set features = EnumSet.noneOf(FeatureFlag.class); private final List keystoreProviders = new ArrayList<>(); @@ -123,11 +123,19 @@ DistributionType getDistributionType() { @Override public T module(String moduleName) { - this.modules.add(moduleName); + this.modules.put(moduleName, new DefaultPluginInstallSpec()); return cast(this); } - Set getModules() { + @Override + public T module(String moduleName, Consumer config) { + DefaultPluginInstallSpec spec = new DefaultPluginInstallSpec(); + config.accept(spec); + this.modules.put(moduleName, spec); + return cast(this); + } + + Map getModules() { return inherit(() -> parent.getModules(), modules); } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java index 13adde1da8a69..4331728aedb1e 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java +++ 
b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalClusterHandle.java @@ -24,6 +24,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.Duration; +import java.util.ArrayList; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; @@ -132,6 +133,19 @@ public String getTransportEndpoints() { return execute(() -> nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining(","))); } + @Override + public List getAvailableTransportEndpoints() { + final var results = new ArrayList(nodes.size() * 2); // *2 because each node has both IPv4 and IPv6 addresses + for (final var node : nodes) { + try { + results.addAll(node.getAvailableTransportEndpoints()); + } catch (Exception e) { + LOGGER.warn("failure reading available transport endpoints from [{}]", node.getName(), e); + } + } + return results; + } + @Override public String getTransportEndpoint(int index) { return getTransportEndpoints().split(",")[index]; @@ -252,6 +266,13 @@ private void writeUnicastHostsFile() { String transportUris = execute(() -> nodes.parallelStream().map(Node::getTransportEndpoint).collect(Collectors.joining("\n"))); execute(() -> nodes.parallelStream().forEach(node -> { try { + if (node.getSpec().getPlugins().containsKey("discovery-ec2")) { + // If we're using (i.e. testing) a discovery plugin then suppress the file-based discovery mechanism, to make sure the + // test does not pass spuriously by using file-based discovery. + // TODO find a way to do this without just hard-coding the plugin name here. 
+ LOGGER.info("Skipping writing unicast hosts file for node {}", node.getName()); + return; + } Path hostsFile = node.getWorkingDir().resolve("config").resolve("unicast_hosts.txt"); LOGGER.info("Writing unicast hosts file {} for node {}", hostsFile, node.getName()); Files.writeString(hostsFile, transportUris); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java index 3853c60687a0d..fca525a2b4d04 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/DefaultLocalElasticsearchCluster.java @@ -19,6 +19,7 @@ import java.lang.annotation.Annotation; import java.lang.reflect.InvocationTargetException; import java.util.Arrays; +import java.util.List; import java.util.function.Supplier; public class DefaultLocalElasticsearchCluster implements ElasticsearchCluster { @@ -126,6 +127,12 @@ public String getTransportEndpoints() { return handle.getTransportEndpoints(); } + @Override + public List getAvailableTransportEndpoints() { + checkHandle(); + return handle.getAvailableTransportEndpoints(); + } + @Override public String getTransportEndpoint(int index) { checkHandle(); diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java index acceb14e6460e..7c55da3a4e1b9 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterHandle.java @@ -15,6 +15,7 @@ import org.elasticsearch.test.cluster.util.Version; import java.io.InputStream; +import java.util.List; public interface LocalClusterHandle 
extends ClusterHandle { @@ -54,6 +55,11 @@ public interface LocalClusterHandle extends ClusterHandle { */ String getTransportEndpoints(); + /** + * @return a list of all available TCP transport endpoints, which may be empty if none of the nodes in this cluster are started. + */ + List getAvailableTransportEndpoints(); + /** * Returns the TCP transport endpoint for the node at the given index. If this method is called on an unstarted cluster, the cluster * will be started. This method is thread-safe and subsequent calls will wait for cluster start and availability. diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java index b9e9520e77ebb..ed5c0c5d1bbc0 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalClusterSpec.java @@ -91,7 +91,7 @@ public static class LocalNodeSpec { private final Map settings; private final List environmentProviders; private final Map environment; - private final Set modules; + private final Map modules; private final Map plugins; private final DistributionType distributionType; private final Set features; @@ -113,7 +113,7 @@ public LocalNodeSpec( Map settings, List environmentProviders, Map environment, - Set modules, + Map modules, Map plugins, DistributionType distributionType, Set features, @@ -175,7 +175,7 @@ public DistributionType getDistributionType() { return distributionType; } - public Set getModules() { + public Map getModules() { return modules; } diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java index 2b44126fef4ee..20c92dc2b11d7 100644 --- 
a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/local/LocalSpecBuilder.java @@ -69,6 +69,12 @@ interface LocalSpecBuilder> { */ T module(String moduleName); + /** + * Ensure module is installed into the distribution when using the {@link DistributionType#INTEG_TEST} distribution. This is ignored + * when the {@link DistributionType#DEFAULT} is being used. + */ + T module(String moduleName, Consumer config); + /** * Ensure plugin is installed into the distribution. */ diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ArchivePatcher.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ArchivePatcher.java index 269d1dd9f516c..0be5295c53cfb 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ArchivePatcher.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/util/ArchivePatcher.java @@ -50,15 +50,24 @@ public Path patch() { ZipEntry entry = entries.nextElement(); output.putNextEntry(entry); if (overrides.containsKey(entry.getName())) { + Function override = overrides.remove(entry.getName()); try (BufferedReader reader = new BufferedReader(new InputStreamReader(input.getInputStream(entry)))) { String content = reader.lines().collect(Collectors.joining(System.lineSeparator())); - overrides.get(entry.getName()).apply(content).transferTo(output); + override.apply(content).transferTo(output); } } else { input.getInputStream(entry).transferTo(output); } output.closeEntry(); } + + for (Map.Entry> override : overrides.entrySet()) { + ZipEntry entry = new ZipEntry(override.getKey()); + output.putNextEntry(entry); + override.getValue().apply("").transferTo(output); + output.closeEntry(); + } + output.flush(); output.finish(); } catch (IOException e) { diff --git 
a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java index 0ecc0fdc81e6b..91d35d79b7c87 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchActionIT.java @@ -200,7 +200,6 @@ public void testTermsAggregation() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/81941") public void testRestartAfterCompletion() throws Exception { final String initialId; try (SearchResponseIterator it = assertBlockingIterator(indexName, numShards, new SearchSourceBuilder(), 0, 2)) { diff --git a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java index 4c612d5e04886..42caf79fe9a39 100644 --- a/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java +++ b/x-pack/plugin/autoscaling/src/main/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderService.java @@ -818,7 +818,7 @@ private SingleForecast forecast(Metadata metadata, DataStream stream, long forec final String uuid = UUIDs.randomBase64UUID(); final Tuple rolledDataStreamInfo = stream.unsafeNextWriteIndexAndGeneration( state.metadata(), - stream.getBackingIndices() + stream.getDataComponent() ); stream = stream.unsafeRollover( new Index(rolledDataStreamInfo.v1(), uuid), diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java index 
d8e634a297bfa..32ea3bc1caaa6 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/action/TransportPutFollowAction.java @@ -337,11 +337,11 @@ static DataStream updateLocalDataStream( .setBackingIndices( // Replicated data streams can't be rolled over, so having the `rolloverOnWrite` flag set to `true` wouldn't make sense // (and potentially even break things). - remoteDataStream.getBackingIndices().copy().setIndices(List.of(backingIndexToFollow)).setRolloverOnWrite(false).build() + remoteDataStream.getDataComponent().copy().setIndices(List.of(backingIndexToFollow)).setRolloverOnWrite(false).build() ) // Replicated data streams should not have the failure store marked for lazy rollover (which they do by default for lazy // failure store creation). - .setFailureIndices(remoteDataStream.getFailureIndices().copy().setRolloverOnWrite(false).build()) + .setFailureIndices(remoteDataStream.getFailureComponent().copy().setRolloverOnWrite(false).build()) .setReplicated(true) .build(); } else { @@ -384,7 +384,7 @@ static DataStream updateLocalDataStream( } return localDataStream.copy() - .setBackingIndices(localDataStream.getBackingIndices().copy().setIndices(backingIndices).build()) + .setBackingIndices(localDataStream.getDataComponent().copy().setIndices(backingIndices).build()) .setGeneration(remoteDataStream.getGeneration()) .setMetadata(remoteDataStream.getMetadata()) .build(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java index 7eece9177cf2b..f1b3fec97ad3c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/DataStreamUsageTransportAction.java @@ -68,8 +68,8 @@ protected void masterOperation( if (ds.isFailureStoreEffectivelyEnabled(dataStreamFailureStoreSettings)) { failureStoreEffectivelyEnabledCounter++; } - if (ds.getFailureIndices().getIndices().isEmpty() == false) { - failureIndicesCounter += ds.getFailureIndices().getIndices().size(); + if (ds.getFailureIndices().isEmpty() == false) { + failureIndicesCounter += ds.getFailureIndices().size(); } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java index 024d24fdf5151..4c8a63ed73866 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/deprecation/DeprecatedIndexPredicate.java @@ -9,6 +9,7 @@ import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.MetadataIndexStateService; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.IndexVersions; @@ -33,7 +34,15 @@ public static Predicate getReindexRequiredPredicate(Metadata metadata) { } public static boolean reindexRequired(IndexMetadata indexMetadata) { - return creationVersionBeforeMinimumWritableVersion(indexMetadata) && isNotSearchableSnapshot(indexMetadata); + return creationVersionBeforeMinimumWritableVersion(indexMetadata) + && isNotSearchableSnapshot(indexMetadata) + && isNotClosed(indexMetadata) + && isNotVerifiedReadOnly(indexMetadata); + } + + private static boolean isNotVerifiedReadOnly(IndexMetadata indexMetadata) { + // no need to check blocks. 
+ return MetadataIndexStateService.VERIFIED_READ_ONLY_SETTING.get(indexMetadata.getSettings()) == false; } private static boolean isNotSearchableSnapshot(IndexMetadata indexMetadata) { @@ -44,4 +53,8 @@ private static boolean creationVersionBeforeMinimumWritableVersion(IndexMetadata return metadata.getCreationVersion().before(MINIMUM_WRITEABLE_VERSION_AFTER_UPGRADE); } + private static boolean isNotClosed(IndexMetadata indexMetadata) { + return indexMetadata.getState().equals(IndexMetadata.State.CLOSE) == false; + } + } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java index 9bbe41b4797fe..61020c7561fb3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/enrich/EnrichPolicy.java @@ -35,7 +35,7 @@ */ public final class EnrichPolicy implements Writeable, ToXContentFragment { - private static final String ELASTICEARCH_VERSION_DEPRECATION_MESSAGE = + private static final String ELASTICSEARCH_VERSION_DEPRECATION_MESSAGE = "the [elasticsearch_version] field of an enrich policy has no effect and will be removed in a future version of Elasticsearch"; private static final DeprecationLogger deprecationLogger = DeprecationLogger.getLogger(EnrichPolicy.class); @@ -146,7 +146,7 @@ private EnrichPolicy( deprecationLogger.warn( DeprecationCategory.OTHER, "enrich_policy_with_elasticsearch_version", - ELASTICEARCH_VERSION_DEPRECATION_MESSAGE + ELASTICSEARCH_VERSION_DEPRECATION_MESSAGE ); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java index fcb9c78ebefd7..9973f85bc64fa 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/CheckNotDataStreamWriteIndexStep.java @@ -60,7 +60,7 @@ public Result isConditionMet(Index index, ClusterState clusterState) { assert indexAbstraction != null : "invalid cluster metadata. index [" + indexName + "] was not found"; DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - boolean isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + boolean isFailureStoreWriteIndex = index.equals(dataStream.getWriteFailureIndex()); if (isFailureStoreWriteIndex || dataStream.getWriteIndex().equals(index)) { String errorMessage = Strings.format( "index [%s] is the%s write index for data stream [%s], pausing " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java index 7fb350f13a850..c178b28f08ed6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DeleteStep.java @@ -41,7 +41,7 @@ public void performDuringNoSnapshot(IndexMetadata indexMetadata, ClusterState cu DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - Index failureStoreWriteIndex = dataStream.getFailureStoreWriteIndex(); + Index failureStoreWriteIndex = dataStream.getWriteFailureIndex(); boolean isFailureStoreWriteIndex = failureStoreWriteIndex != null && indexName.equals(failureStoreWriteIndex.getName()); // using index name equality across this if/else branch as the UUID of the index might change via restoring a data stream diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java index 6ce9e05e4a464..8cd1a4bd03eab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java @@ -203,7 +203,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { Instant::now ); // Mark source index as read-only - ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, generateDownsampleIndexNameKey, client); + ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, generateDownsampleIndexNameKey, client, true); // Before the downsample action was retry-able, we used to generate a unique downsample index name and delete the previous index in // case a failure occurred. The downsample action can now retry execution in case of failure and start where it left off, so no diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java index b36156842acf5..b93bf652b84b4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyAction.java @@ -69,7 +69,7 @@ public List toSteps(Client client, String phase, StepKey nextStepKey) { readOnlyKey, Instant::now ); - ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, nextStepKey, client); + ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, nextStepKey, client, true); return List.of(checkNotWriteIndexStep, waitUntilTimeSeriesEndTimeStep, readOnlyStep); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java index 208b6bb1b4fd0..2f142d832fc3e 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReadOnlyStep.java @@ -23,9 +23,15 @@ */ public class ReadOnlyStep extends AsyncActionStep { public static final String NAME = "readonly"; + private final boolean markVerified; - public ReadOnlyStep(StepKey key, StepKey nextStepKey, Client client) { + /** + * @param markVerified whether the index should be marked verified after becoming read-only, ensuring that N-2 is supported without + * manual intervention. Should be set to true when the read-only block is not temporary. + */ + public ReadOnlyStep(StepKey key, StepKey nextStepKey, Client client, boolean markVerified) { super(key, nextStepKey, client); + this.markVerified = markVerified; } @Override @@ -39,7 +45,8 @@ public void performAction( .indices() .execute( TransportAddIndexBlockAction.TYPE, - new AddIndexBlockRequest(WRITE, indexMetadata.getIndex().getName()).masterNodeTimeout(TimeValue.MAX_VALUE), + new AddIndexBlockRequest(WRITE, indexMetadata.getIndex().getName()).masterNodeTimeout(TimeValue.MAX_VALUE) + .markVerified(markVerified), listener.delegateFailureAndWrap((l, response) -> { if (response.isAcknowledged() == false) { throw new ElasticsearchException("read only add block index request failed to be acknowledged"); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java index 8bbe68513f425..68143899c0349 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStep.java @@ -85,7 +85,7 @@ public ClusterState performAction(Index index, ClusterState clusterState) { throw new IllegalStateException(errorMessage); } - boolean 
isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + boolean isFailureStoreWriteIndex = index.equals(dataStream.getWriteFailureIndex()); if (isFailureStoreWriteIndex || dataStream.getWriteIndex().equals(index)) { String errorMessage = Strings.format( "index [%s] is the%s write index for data stream [%s], pausing " diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java index 4a8788a849b90..dce6704c21eda 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/RolloverStep.java @@ -61,7 +61,7 @@ public void performAction( final boolean targetFailureStore; DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { - boolean isFailureStoreWriteIndex = indexMetadata.getIndex().equals(dataStream.getFailureStoreWriteIndex()); + boolean isFailureStoreWriteIndex = indexMetadata.getIndex().equals(dataStream.getWriteFailureIndex()); targetFailureStore = dataStream.isFailureStoreIndex(indexMetadata.getIndex().getName()); if (isFailureStoreWriteIndex == false && dataStream.getWriteIndex().equals(indexMetadata.getIndex()) == false) { logger.warn( diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java index f7478518613e2..4e7bdcb1197bc 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/ShrinkAction.java @@ -233,7 +233,7 @@ public List toSteps(Client client, String phase, Step.StepKey nextStepKey) readOnlyKey, Instant::now ); - ReadOnlyStep readOnlyStep = new ReadOnlyStep(readOnlyKey, checkTargetShardsCountKey, client); + ReadOnlyStep 
readOnlyStep = new ReadOnlyStep(readOnlyKey, checkTargetShardsCountKey, client, false); CheckTargetShardsCountStep checkTargetShardsCountStep = new CheckTargetShardsCountStep( checkTargetShardsCountKey, cleanupShrinkIndexKey, diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java index 989223ef48da7..08709f950f562 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForActiveShardsStep.java @@ -84,7 +84,7 @@ public Result isConditionMet(Index index, ClusterState clusterState) { // Determine which write index we care about right now: final Index rolledIndex; if (dataStream.isFailureStoreIndex(index.getName())) { - rolledIndex = dataStream.getFailureStoreWriteIndex(); + rolledIndex = dataStream.getWriteFailureIndex(); } else { rolledIndex = dataStream.getWriteIndex(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java index be1a9d14cfdd1..7fd702bebc9c6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/WaitForRolloverReadyStep.java @@ -88,7 +88,7 @@ public void evaluateCondition(Metadata metadata, Index index, Listener listener, DataStream dataStream = indexAbstraction.getParentDataStream(); if (dataStream != null) { targetFailureStore = dataStream.isFailureStoreIndex(index.getName()); - boolean isFailureStoreWriteIndex = index.equals(dataStream.getFailureStoreWriteIndex()); + boolean isFailureStoreWriteIndex = index.equals(dataStream.getWriteFailureIndex()); if (isFailureStoreWriteIndex == false && 
dataStream.getWriteIndex().equals(index) == false) { logger.warn( "index [{}] is not the {}write index for data stream [{}]. skipping rollover for policy [{}]", diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java index 8d121463fb465..f5c852a0450ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionAction.java @@ -92,9 +92,9 @@ public ActionRequestValidationException validate() { return e; } - if (taskType.isAnyOrSame(TaskType.COMPLETION) == false) { + if (taskType.isAnyOrSame(TaskType.CHAT_COMPLETION) == false) { var e = new ActionRequestValidationException(); - e.addValidationError("Field [taskType] must be [completion]"); + e.addValidationError("Field [taskType] must be [chat_completion]"); return e; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java index 80716c9f7c9df..9d102e6954d04 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/AuthorizationEngine.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -74,7 +75,7 @@ * can actually 
impersonate the user running the request. *
  • {@link #authorizeClusterAction(RequestInfo, AuthorizationInfo, ActionListener)} if the * request is a cluster level operation.
  • - *
  • {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Map, ActionListener)} if + *
  • {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Metadata, ActionListener)} if * the request is a an index action. This method may be called multiple times for a single * request as the request may be made up of sub-requests that also need to be authorized. The async supplier * for resolved indices will invoke the @@ -83,7 +84,7 @@ * *

    * NOTE: the {@link #loadAuthorizedIndices(RequestInfo, AuthorizationInfo, Map, ActionListener)} - * method may be called prior to {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Map, ActionListener)} + * method may be called prior to {@link #authorizeIndexAction(RequestInfo, AuthorizationInfo, AsyncSupplier, Metadata, ActionListener)} * in cases where wildcards need to be expanded. *


    * Authorization engines can be called from various threads including network threads that should @@ -157,7 +158,7 @@ public interface AuthorizationEngine { * from {@link #resolveAuthorizationInfo(RequestInfo, ActionListener)} * @param indicesAsyncSupplier the asynchronous supplier for the indices that this request is * attempting to operate on - * @param aliasOrIndexLookup a map of a string name to the cluster metadata specific to that + * @param metadata a map of a string name to the cluster metadata specific to that * alias or index * @param listener the listener to be notified of the authorization result */ @@ -165,7 +166,7 @@ void authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, + Metadata metadata, ActionListener listener ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java index 4ea590afff864..b91db5ca34366 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/IndicesPermission.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.logging.DeprecationCategory; @@ -437,13 +438,13 @@ public int size(Map lookup) { for (Index index : indexAbstraction.getIndices()) { DataStream parentDataStream = lookup.get(index.getName()).getParentDataStream(); if (parentDataStream != null && 
aliasDataStreams.add(parentDataStream)) { - failureIndices += parentDataStream.getFailureIndices().getIndices().size(); + failureIndices += parentDataStream.getFailureIndices().size(); } } size += failureIndices; } else { DataStream parentDataStream = (DataStream) indexAbstraction; - size += parentDataStream.getFailureIndices().getIndices().size(); + size += parentDataStream.getFailureIndices().size(); } } return size; @@ -452,37 +453,18 @@ public int size(Map lookup) { } } - public Collection resolveConcreteIndices(Map lookup) { + public Collection resolveConcreteIndices(Metadata metadata) { if (indexAbstraction == null) { return List.of(); } else if (indexAbstraction.getType() == IndexAbstraction.Type.CONCRETE_INDEX) { return List.of(indexAbstraction.getName()); } else if (IndexComponentSelector.FAILURES.equals(selector)) { - if (IndexAbstraction.Type.ALIAS.equals(indexAbstraction.getType())) { - Set aliasDataStreams = new HashSet<>(); - for (Index index : indexAbstraction.getIndices()) { - DataStream parentDataStream = lookup.get(index.getName()).getParentDataStream(); - if (parentDataStream != null) { - aliasDataStreams.add(parentDataStream); - } - } - List concreteIndexNames = new ArrayList<>(aliasDataStreams.size()); - for (DataStream aliasDataStream : aliasDataStreams) { - DataStream.DataStreamIndices failureIndices = aliasDataStream.getFailureIndices(); - for (Index index : failureIndices.getIndices()) { - concreteIndexNames.add(index.getName()); - } - } - return concreteIndexNames; - } else { - DataStream parentDataStream = (DataStream) indexAbstraction; - DataStream.DataStreamIndices failureIndices = parentDataStream.getFailureIndices(); - List concreteIndexNames = new ArrayList<>(failureIndices.getIndices().size()); - for (Index index : failureIndices.getIndices()) { - concreteIndexNames.add(index.getName()); - } - return concreteIndexNames; + final List failureIndices = indexAbstraction.getFailureIndices(metadata); + final List concreteIndexNames = new 
ArrayList<>(failureIndices.size()); + for (var idx : failureIndices) { + concreteIndexNames.add(idx.getName()); } + return concreteIndexNames; } else { final List indices = indexAbstraction.getIndices(); final List concreteIndexNames = new ArrayList<>(indices.size()); @@ -504,7 +486,7 @@ public boolean canHaveBackingIndices() { public IndicesAccessControl authorize( String action, Set requestedIndicesOrAliases, - Map lookup, + Metadata metadata, FieldPermissionsCache fieldPermissionsCache ) { // Short circuit if the indicesPermission allows all access to every index @@ -516,7 +498,7 @@ public IndicesAccessControl authorize( final Map resources = Maps.newMapWithExpectedSize(requestedIndicesOrAliases.size()); int totalResourceCount = 0; - + Map lookup = metadata.getIndicesLookup(); for (String indexOrAlias : requestedIndicesOrAliases) { // Remove any selectors from abstraction name. Discard them for this check as we do not have access control for them (yet) Tuple expressionAndSelector = IndexNameExpressionResolver.splitSelectorExpression(indexOrAlias); @@ -536,7 +518,7 @@ public IndicesAccessControl authorize( resources, finalTotalResourceCount, fieldPermissionsCache, - lookup + metadata ); return new IndicesAccessControl(overallGranted, indexPermissions); @@ -547,11 +529,11 @@ private Map buildIndicesAccessC final Map requestedResources, final int totalResourceCount, final FieldPermissionsCache fieldPermissionsCache, - final Map lookup + final Metadata metadata ) { // now... 
every index that is associated with the request, must be granted - // by at least one indices permission group + // by at least one index permission group final Map> fieldPermissionsByIndex = Maps.newMapWithExpectedSize(totalResourceCount); final Map roleQueriesByIndex = Maps.newMapWithExpectedSize(totalResourceCount); final Set grantedResources = Sets.newHashSetWithExpectedSize(totalResourceCount); @@ -562,7 +544,7 @@ private Map buildIndicesAccessC // true if ANY group covers the given index AND the given action boolean granted = false; - final Collection concreteIndices = resource.resolveConcreteIndices(lookup); + final Collection concreteIndices = resource.resolveConcreteIndices(metadata); for (Group group : groups) { // the group covers the given index OR the given index is a backing index and the group covers the parent data stream if (resource.checkIndex(group)) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java index ea32ba13ae576..010e08b0d4db6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRole.java @@ -11,7 +11,7 @@ import org.apache.logging.log4j.Logger; import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.core.Nullable; import org.elasticsearch.transport.TransportRequest; @@ -28,13 +28,12 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.Set; /** * A {@link Role} limited by another role.
    - * The effective permissions returned on {@link #authorize(String, Set, Map, FieldPermissionsCache)} call would be limited by the + * The effective permissions returned on {@link #authorize(String, Set, Metadata, FieldPermissionsCache)} call would be limited by the * provided role. */ public final class LimitedRole implements Role { @@ -139,19 +138,14 @@ public int hashCode() { public IndicesAccessControl authorize( String action, Set requestedIndicesOrAliases, - Map aliasAndIndexLookup, + Metadata metadata, FieldPermissionsCache fieldPermissionsCache ) { - IndicesAccessControl indicesAccessControl = baseRole.authorize( - action, - requestedIndicesOrAliases, - aliasAndIndexLookup, - fieldPermissionsCache - ); + IndicesAccessControl indicesAccessControl = baseRole.authorize(action, requestedIndicesOrAliases, metadata, fieldPermissionsCache); IndicesAccessControl limitedByIndicesAccessControl = limitedByRole.authorize( action, requestedIndicesOrAliases, - aliasAndIndexLookup, + metadata, fieldPermissionsCache ); return indicesAccessControl.limitIndicesAccessControl(limitedByIndicesAccessControl); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index f52f8f85f006d..fe97b152a2ee7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -9,7 +9,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.core.Nullable; @@ -180,7 +180,7 @@ boolean 
checkApplicationResourcePrivileges( IndicesAccessControl authorize( String action, Set requestedIndicesOrAliases, - Map aliasAndIndexLookup, + Metadata metadata, FieldPermissionsCache fieldPermissionsCache ); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRole.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRole.java index 0ec9d2a48316a..9b63b73d7801b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRole.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/SimpleRole.java @@ -8,7 +8,7 @@ import org.apache.lucene.util.automaton.Automaton; import org.elasticsearch.TransportVersion; -import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; @@ -32,7 +32,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import java.util.Map; import java.util.Objects; import java.util.Set; import java.util.concurrent.ExecutionException; @@ -196,10 +195,10 @@ public boolean checkApplicationResourcePrivileges( public IndicesAccessControl authorize( String action, Set requestedIndicesOrAliases, - Map aliasAndIndexLookup, + Metadata metadata, FieldPermissionsCache fieldPermissionsCache ) { - return indices.authorize(action, requestedIndicesOrAliases, aliasAndIndexLookup, fieldPermissionsCache); + return indices.authorize(action, requestedIndicesOrAliases, metadata, fieldPermissionsCache); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java index 
502d403cf979f..42baa1f769b23 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdate.java @@ -239,6 +239,11 @@ public boolean changesHeaders(TransformConfig config) { return isNullOrEqual(headers, config.getHeaders()) == false; } + public boolean changesDestIndex(TransformConfig config) { + var updatedIndex = dest == null ? null : dest.getIndex(); + return isNullOrEqual(updatedIndex, config.getDestination().getIndex()) == false; + } + private static boolean isNullOrEqual(Object lft, Object rgt) { return lft == null || lft.equals(rgt); } diff --git a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml index 8983dd6663e65..4e0266b06bbb0 100644 --- a/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/core/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,4 +1,18 @@ org.apache.httpcomponents.httpclient: - - network: - actions: - - connect # For SamlRealm + - outbound_network # For SamlRealm +org.apache.httpcomponents.httpcore.nio: + - outbound_network +unboundid.ldapsdk: + - write_system_properties: + properties: + - java.security.auth.login.config + - javax.security.auth.useSubjectCredsOnly + - java.security.krb5.kdc + - java.security.krb5.realm + - networkaddress.cache.ttl + - networkaddress.cache.negative.ttl + - javax.net.debug + - com.unboundid.util.PassphraseEncryptedOutputStream.defaultAES128CipherTypeIterationCount + - org.bouncycastle.fips.approved_only + - org.bouncycastle.rsa.allow_multi_use + - org.bouncycastle.jsse.enable_md5 diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java 
index a3318e68305c6..9050154ee6e50 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ilm/ReplaceDataStreamBackingIndexStepTests.java @@ -222,9 +222,7 @@ public void testPerformAction() { IndexMetadata indexToOperateOn = useFailureStore ? failureSourceIndexMetadata : sourceIndexMetadata; ClusterState newState = replaceSourceIndexStep.performAction(indexToOperateOn.getIndex(), clusterState); DataStream updatedDataStream = newState.metadata().dataStreams().get(dataStreamName); - DataStream.DataStreamIndices resultIndices = useFailureStore - ? updatedDataStream.getFailureIndices() - : updatedDataStream.getBackingIndices(); + DataStream.DataStreamIndices resultIndices = updatedDataStream.getDataStreamIndices(useFailureStore); assertThat(resultIndices.getIndices().size(), is(2)); assertThat(resultIndices.getIndices().get(0), is(targetIndexMetadata.getIndex())); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java index 1872ac3caa230..f548bfa0709ed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/UnifiedCompletionActionRequestTests.java @@ -52,7 +52,7 @@ public void testValidation_ReturnsException_When_TaskType_IsNot_Completion() { TimeValue.timeValueSeconds(10) ); var exception = request.validate(); - assertThat(exception.getMessage(), is("Validation Failed: 1: Field [taskType] must be [completion];")); + assertThat(exception.getMessage(), is("Validation Failed: 1: Field [taskType] must be [chat_completion];")); } public void 
testValidation_ReturnsNull_When_TaskType_IsAny() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java index feea49430cfc3..a4646c0d736c5 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/permission/LimitedRoleTests.java @@ -353,7 +353,7 @@ public void testAuthorize() { IndicesAccessControl iac = fromRole.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), + md, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); @@ -361,12 +361,7 @@ public void testAuthorize() { assertThat(iac.hasIndexPermissions("_index"), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); - iac = fromRole.authorize( - TransportCreateIndexAction.TYPE.name(), - Sets.newHashSet("_index", "_index1"), - md.getIndicesLookup(), - fieldPermissionsCache - ); + iac = fromRole.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_index1"), md, fieldPermissionsCache); assertThat(iac.isGranted(), is(true)); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.hasIndexPermissions("_index"), is(true)); @@ -382,7 +377,7 @@ public void testAuthorize() { iac = limitedByRole.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), + md, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); @@ -393,7 +388,7 @@ public void testAuthorize() { iac = limitedByRole.authorize( TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), + md, fieldPermissionsCache ); 
assertThat(iac.isGranted(), is(false)); @@ -404,7 +399,7 @@ public void testAuthorize() { iac = limitedByRole.authorize( TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), + md, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); @@ -419,34 +414,19 @@ public void testAuthorize() { } else { role = fromRole.limitedBy(limitedByRole); } - iac = role.authorize( - TransportSearchAction.TYPE.name(), - Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), - fieldPermissionsCache - ); + iac = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); assertThat(iac.isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index"), is(notNullValue())); assertThat(iac.hasIndexPermissions("_index"), is(true)); assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); - iac = role.authorize( - TransportDeleteIndexAction.TYPE.name(), - Sets.newHashSet("_index", "_alias1"), - md.getIndicesLookup(), - fieldPermissionsCache - ); + iac = role.authorize(TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("_index", "_alias1"), md, fieldPermissionsCache); assertThat(iac.isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index"), is(false)); assertThat(iac.getIndexPermissions("_index1"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index1"), is(false)); - iac = role.authorize( - TransportCreateIndexAction.TYPE.name(), - Sets.newHashSet("_index", "_index1"), - md.getIndicesLookup(), - fieldPermissionsCache - ); + iac = role.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("_index", "_index1"), md, fieldPermissionsCache); assertThat(iac.isGranted(), is(false)); assertThat(iac.getIndexPermissions("_index"), is(nullValue())); assertThat(iac.hasIndexPermissions("_index"), is(false)); diff 
--git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index b08dd90ae9065..107953557f3ea 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -223,7 +223,6 @@ import java.util.List; import java.util.Map; import java.util.Set; -import java.util.SortedMap; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -2634,7 +2633,7 @@ private void assertMonitoringOnRestrictedIndices(Role role) { .authorize( indexMonitoringActionName, Sets.newHashSet(internalSecurityIndex, TestRestrictedIndices.SECURITY_MAIN_ALIAS, asyncSearchIndex), - metadata.getIndicesLookup(), + metadata, fieldPermissionsCache ); assertThat(iac.hasIndexPermissions(internalSecurityIndex), is(true)); @@ -2857,20 +2856,19 @@ public void testSuperuserRole() { .build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = metadata.getIndicesLookup(); IndicesAccessControl iac = superuserRole.indices() - .authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); + .authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "ba"), metadata, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("a1"), is(true)); assertThat(iac.hasIndexPermissions("b"), is(true)); iac = superuserRole.indices() - .authorize(TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("a1", "ba"), lookup, fieldPermissionsCache); + .authorize(TransportDeleteIndexAction.TYPE.name(), Sets.newHashSet("a1", "ba"), metadata, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("a1"), is(true)); assertThat(iac.hasIndexPermissions("b"), 
is(true)); - iac = superuserRole.indices().authorize(TransportIndexAction.NAME, Sets.newHashSet("a2", "ba"), lookup, fieldPermissionsCache); + iac = superuserRole.indices().authorize(TransportIndexAction.NAME, Sets.newHashSet("a2", "ba"), metadata, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("a2"), is(true)); assertThat(iac.hasIndexPermissions("b"), is(true)); iac = superuserRole.indices() - .authorize(TransportUpdateSettingsAction.TYPE.name(), Sets.newHashSet("aaaaaa", "ba"), lookup, fieldPermissionsCache); + .authorize(TransportUpdateSettingsAction.TYPE.name(), Sets.newHashSet("aaaaaa", "ba"), metadata, fieldPermissionsCache); assertThat(iac.hasIndexPermissions("aaaaaa"), is(true)); assertThat(iac.hasIndexPermissions("b"), is(true)); @@ -2879,7 +2877,7 @@ public void testSuperuserRole() { .authorize( randomFrom(TransportSearchAction.TYPE.name(), GetIndexAction.NAME), Sets.newHashSet(TestRestrictedIndices.SECURITY_MAIN_ALIAS), - lookup, + metadata, fieldPermissionsCache ); assertThat("For " + iac, iac.hasIndexPermissions(TestRestrictedIndices.SECURITY_MAIN_ALIAS), is(true)); @@ -2890,7 +2888,7 @@ public void testSuperuserRole() { .authorize( randomFrom(TransportIndexAction.NAME, TransportDeleteIndexAction.TYPE.name()), Sets.newHashSet(TestRestrictedIndices.SECURITY_MAIN_ALIAS), - lookup, + metadata, fieldPermissionsCache ); assertThat("For " + iac, iac.hasIndexPermissions(TestRestrictedIndices.SECURITY_MAIN_ALIAS), is(false)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java index 180c9ca1f674d..e6b9e7f75a87d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigTests.java @@ -78,10 +78,19 @@ public 
static TransformConfig randomTransformConfigWithoutHeaders(String id) { } public static TransformConfig randomTransformConfigWithoutHeaders(String id, PivotConfig pivotConfig, LatestConfig latestConfig) { + return randomTransformConfigWithoutHeaders(id, pivotConfig, latestConfig, randomDestConfig()); + } + + public static TransformConfig randomTransformConfigWithoutHeaders( + String id, + PivotConfig pivotConfig, + LatestConfig latestConfig, + DestConfig destConfig + ) { return new TransformConfig( id, randomSourceConfig(), - randomDestConfig(), + destConfig, randomBoolean() ? null : TimeValue.timeValueMillis(randomIntBetween(1_000, 3_600_000)), randomBoolean() ? null : randomSyncConfig(), null, diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdateTests.java index 061078260725b..62020f9992122 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/transform/transforms/TransformConfigUpdateTests.java @@ -126,6 +126,20 @@ public void testChangesHeaders() { assertTrue("true update changes headers", update.changesHeaders(config)); } + public void testChangesDestIndex() { + TransformConfig config = randomTransformConfig(); + TransformConfigUpdate update = new TransformConfigUpdate(null, null, null, null, null, null, null, null); + assertFalse("null update does not change destination index", update.changesDestIndex(config)); + + var newDestWithSameIndex = new DestConfig(config.getDestination().getIndex(), null, null); + update = new TransformConfigUpdate(null, newDestWithSameIndex, null, null, null, null, null, null); + assertFalse("equal update does not change destination index", update.changesDestIndex(config)); + + var newDestWithNewIndex = new 
DestConfig(config.getDestination().getIndex() + "-new", null, null); + update = new TransformConfigUpdate(null, newDestWithNewIndex, null, null, null, null, null, null); + assertTrue("true update changes destination index", update.changesDestIndex(config)); + } + public void testApply() { TransformConfig config = new TransformConfig( "time-transform", diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java index 0b9a538d505c9..1bc040418bf07 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/DeprecationChecks.java @@ -88,7 +88,8 @@ private DeprecationChecks() {} NodeDeprecationChecks::checkEqlEnabledSetting, NodeDeprecationChecks::checkNodeAttrData, NodeDeprecationChecks::checkWatcherBulkConcurrentRequestsSetting, - NodeDeprecationChecks::checkTracingApmSettings + NodeDeprecationChecks::checkTracingApmSettings, + NodeDeprecationChecks::checkSourceModeInComponentTemplates ); static List> INDEX_SETTINGS_CHECKS = List.of( @@ -97,8 +98,7 @@ private DeprecationChecks() {} IndexDeprecationChecks::checkIndexDataPath, IndexDeprecationChecks::storeTypeSettingCheck, IndexDeprecationChecks::frozenIndexSettingCheck, - IndexDeprecationChecks::deprecatedCamelCasePattern, - IndexDeprecationChecks::checkSourceModeInMapping + IndexDeprecationChecks::deprecatedCamelCasePattern ); static List> DATA_STREAM_CHECKS = List.of( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java index de06e270a867e..1bef1464152db 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java 
+++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecks.java @@ -15,7 +15,6 @@ import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.engine.frozen.FrozenEngine; -import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.xpack.core.deprecation.DeprecatedIndexPredicate; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -203,31 +202,6 @@ static List findInPropertiesRecursively( return issues; } - static DeprecationIssue checkSourceModeInMapping(IndexMetadata indexMetadata, ClusterState clusterState) { - if (SourceFieldMapper.onOrAfterDeprecateModeVersion(indexMetadata.getCreationVersion())) { - boolean[] useSourceMode = { false }; - fieldLevelMappingIssue(indexMetadata, ((mappingMetadata, sourceAsMap) -> { - Object source = sourceAsMap.get("_source"); - if (source instanceof Map sourceMap) { - if (sourceMap.containsKey("mode")) { - useSourceMode[0] = true; - } - } - })); - if (useSourceMode[0]) { - return new DeprecationIssue( - DeprecationIssue.Level.CRITICAL, - SourceFieldMapper.DEPRECATION_WARNING, - "https://github.com/elastic/elasticsearch/pull/117172", - SourceFieldMapper.DEPRECATION_WARNING, - false, - null - ); - } - } - return null; - } - static DeprecationIssue deprecatedCamelCasePattern(IndexMetadata indexMetadata, ClusterState clusterState) { List fields = new ArrayList<>(); fieldLevelMappingIssue( diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java index b6fff5a82f0cd..f1a1f91ba35a0 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecks.java @@ -9,12 +9,15 @@ import 
org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; @@ -1012,4 +1015,43 @@ static DeprecationIssue checkTracingApmSettings( DeprecationIssue.Level.CRITICAL ); } + + static DeprecationIssue checkSourceModeInComponentTemplates( + final Settings settings, + final PluginsAndModules pluginsAndModules, + final ClusterState clusterState, + final XPackLicenseState licenseState + ) { + List templates = new ArrayList<>(); + var templateNames = clusterState.metadata().componentTemplates().keySet(); + for (String templateName : templateNames) { + ComponentTemplate template = clusterState.metadata().componentTemplates().get(templateName); + if (template.template().mappings() != null) { + var sourceAsMap = (Map) XContentHelper.convertToMap(template.template().mappings().uncompressed(), true) + .v2() + .get("_doc"); + if (sourceAsMap != null) { + Object source = sourceAsMap.get("_source"); + if (source instanceof Map sourceMap) { + if (sourceMap.containsKey("mode")) { + templates.add(templateName); + } + } + } + } + + } + if (templates.isEmpty()) { + return null; + } + Collections.sort(templates); + return new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING, + "https://github.com/elastic/elasticsearch/pull/117172", + 
SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [" + String.join(", ", templates) + "]", + false, + null + ); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java index b297cc1a5bdf8..712807db46ecd 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/DataStreamDeprecationChecksTests.java @@ -41,56 +41,79 @@ public void testOldIndicesCheck() { int oldIndexCount = randomIntBetween(1, 100); int newIndexCount = randomIntBetween(1, 100); - List allIndices = new ArrayList<>(); Map nameToIndexMetadata = new HashMap<>(); Set expectedIndices = new HashSet<>(); - for (int i = 0; i < oldIndexCount; i++) { - Settings.Builder settings = settings(IndexVersion.fromId(7170099)); + DataStream dataStream = createTestDataStream(oldIndexCount, 0, newIndexCount, 0, nameToIndexMetadata, expectedIndices); - String indexName = "old-data-stream-index-" + i; - if (expectedIndices.isEmpty() == false && randomIntBetween(0, 2) == 0) { - settings.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); - } else { - expectedIndices.add(indexName); - } + Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); - Settings.Builder settingsBuilder = settings; - IndexMetadata oldIndexMetadata = IndexMetadata.builder(indexName) - .settings(settingsBuilder) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - allIndices.add(oldIndexMetadata.getIndex()); - nameToIndexMetadata.put(oldIndexMetadata.getIndex().getName(), oldIndexMetadata); - } + DeprecationIssue expected = 
new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + "Old data stream with a compatibility version < 9.0", + "https://www.elastic.co/guide/en/elasticsearch/reference/master/breaking-changes-9.0.html", + "This data stream has backing indices that were created before Elasticsearch 9.0.0", + false, + ofEntries( + entry("reindex_required", true), + entry("total_backing_indices", oldIndexCount + newIndexCount), + entry("indices_requiring_upgrade_count", expectedIndices.size()), + entry("indices_requiring_upgrade", expectedIndices) + ) + ); - for (int i = 0; i < newIndexCount; i++) { - Settings.Builder settingsBuilder = settings(IndexVersion.current()); - IndexMetadata newIndexMetadata = IndexMetadata.builder("new-data-stream-index-" + i) - .settings(settingsBuilder) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - allIndices.add(newIndexMetadata.getIndex()); - nameToIndexMetadata.put(newIndexMetadata.getIndex().getName(), newIndexMetadata); - } + List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); - DataStream dataStream = new DataStream( - randomAlphaOfLength(10), - allIndices, - randomNegativeLong(), - Map.of(), - randomBoolean(), - false, - false, - randomBoolean(), - randomFrom(IndexMode.values()), - null, - randomFrom(DataStreamOptions.EMPTY, DataStreamOptions.FAILURE_STORE_DISABLED, DataStreamOptions.FAILURE_STORE_ENABLED, null), - List.of(), - randomBoolean(), - null + assertThat(issues, equalTo(singletonList(expected))); + } + + public void testOldIndicesCheckWithOnlyClosedOrNewIndices() { + // This tests what happens when any old indices that we have are closed. We expect no deprecation warning. 
+ int oldClosedIndexCount = randomIntBetween(1, 100); + int newOpenIndexCount = randomIntBetween(0, 100); + int newClosedIndexCount = randomIntBetween(0, 100); + + Map nameToIndexMetadata = new HashMap<>(); + Set expectedIndices = new HashSet<>(); + + DataStream dataStream = createTestDataStream( + 0, + oldClosedIndexCount, + newOpenIndexCount, + newClosedIndexCount, + nameToIndexMetadata, + expectedIndices + ); + + Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); + ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata).build(); + + List issues = DeprecationChecks.filterChecks(DATA_STREAM_CHECKS, c -> c.apply(dataStream, clusterState)); + + assertThat(issues.size(), equalTo(0)); + } + + public void testOldIndicesCheckWithClosedAndOpenIndices() { + /* + * This tests what happens when a data stream has old indices, and some are open and some are closed. We expect a deprecation + * warning that includes information about the old ones only. 
+ */ + int oldOpenIndexCount = randomIntBetween(1, 100); + int oldClosedIndexCount = randomIntBetween(1, 100); + int newOpenIndexCount = randomIntBetween(0, 100); + int newClosedIndexCount = randomIntBetween(0, 100); + + Map nameToIndexMetadata = new HashMap<>(); + Set expectedIndices = new HashSet<>(); + + DataStream dataStream = createTestDataStream( + oldOpenIndexCount, + oldClosedIndexCount, + newOpenIndexCount, + newClosedIndexCount, + nameToIndexMetadata, + expectedIndices ); Metadata metadata = Metadata.builder().indices(nameToIndexMetadata).build(); @@ -104,7 +127,7 @@ public void testOldIndicesCheck() { false, ofEntries( entry("reindex_required", true), - entry("total_backing_indices", oldIndexCount + newIndexCount), + entry("total_backing_indices", oldOpenIndexCount + oldClosedIndexCount + newOpenIndexCount + newClosedIndexCount), entry("indices_requiring_upgrade_count", expectedIndices.size()), entry("indices_requiring_upgrade", expectedIndices) ) @@ -115,4 +138,90 @@ public void testOldIndicesCheck() { assertThat(issues, equalTo(singletonList(expected))); } + /* + * This creates a test DataStream with the given counts. The nameToIndexMetadata Map and the expectedIndices Set are mutable collections + * that will be populated by this method. 
+ */ + private DataStream createTestDataStream( + int oldOpenIndexCount, + int oldClosedIndexCount, + int newOpenIndexCount, + int newClosedIndexCount, + Map nameToIndexMetadata, + Set expectedIndices + ) { + List allIndices = new ArrayList<>(); + + for (int i = 0; i < oldOpenIndexCount; i++) { + allIndices.add(createOldIndex(i, false, nameToIndexMetadata, expectedIndices)); + } + for (int i = 0; i < oldClosedIndexCount; i++) { + allIndices.add(createOldIndex(i, true, nameToIndexMetadata, null)); + } + for (int i = 0; i < newOpenIndexCount; i++) { + allIndices.add(createNewIndex(i, false, nameToIndexMetadata)); + } + for (int i = 0; i < newClosedIndexCount; i++) { + allIndices.add(createNewIndex(i, true, nameToIndexMetadata)); + } + + DataStream dataStream = new DataStream( + randomAlphaOfLength(10), + allIndices, + randomNegativeLong(), + Map.of(), + randomBoolean(), + false, + false, + randomBoolean(), + randomFrom(IndexMode.values()), + null, + randomFrom(DataStreamOptions.EMPTY, DataStreamOptions.FAILURE_STORE_DISABLED, DataStreamOptions.FAILURE_STORE_ENABLED, null), + List.of(), + randomBoolean(), + null + ); + return dataStream; + } + + private Index createOldIndex( + int suffix, + boolean isClosed, + Map nameToIndexMetadata, + Set expectedIndices + ) { + return createIndex(true, suffix, isClosed, nameToIndexMetadata, expectedIndices); + } + + private Index createNewIndex(int suffix, boolean isClosed, Map nameToIndexMetadata) { + return createIndex(false, suffix, isClosed, nameToIndexMetadata, null); + } + + private Index createIndex( + boolean isOld, + int suffix, + boolean isClosed, + Map nameToIndexMetadata, + Set expectedIndices + ) { + Settings.Builder settingsBuilder = isOld ? settings(IndexVersion.fromId(7170099)) : settings(IndexVersion.current()); + String indexName = (isOld ? "old-" : "new-") + (isClosed ? 
"closed-" : "") + "data-stream-index-" + suffix; + if (isOld && isClosed == false) { // we only expect warnings on open old indices + if (expectedIndices.isEmpty() == false && randomIntBetween(0, 2) == 0) { + settingsBuilder.put(INDEX_STORE_TYPE_SETTING.getKey(), SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE); + } else { + expectedIndices.add(indexName); + } + } + IndexMetadata.Builder indexMetadataBuilder = IndexMetadata.builder(indexName) + .settings(settingsBuilder) + .numberOfShards(1) + .numberOfReplicas(0); + if (isClosed) { + indexMetadataBuilder.state(IndexMetadata.State.CLOSE); + } + IndexMetadata indexMetadata = indexMetadataBuilder.build(); + nameToIndexMetadata.put(indexMetadata.getIndex().getName(), indexMetadata); + return indexMetadata.getIndex(); + } } diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index c6f3208a1cfb0..de229c555ade1 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -116,6 +116,22 @@ public void testOldIndicesCheckSnapshotIgnored() { assertThat(issues, empty()); } + public void testOldIndicesCheckClosedIgnored() { + IndexVersion createdWith = IndexVersion.fromId(7170099); + Settings.Builder settings = settings(createdWith); + IndexMetadata indexMetadata = IndexMetadata.builder("test") + .settings(settings) + .numberOfShards(1) + .numberOfReplicas(0) + .state(IndexMetadata.State.CLOSE) + .build(); + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata(Metadata.builder().put(indexMetadata, true)) + .build(); + List issues = DeprecationChecks.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetadata, clusterState)); 
+ assertThat(issues, empty()); + } + public void testTranslogRetentionSettings() { Settings.Builder settings = settings(IndexVersion.current()); settings.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomPositiveTimeValue()); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java index 3aaee0e5cdb52..7fe2be2736ea8 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/NodeDeprecationChecksTests.java @@ -11,23 +11,29 @@ import org.elasticsearch.action.admin.cluster.node.info.PluginsAndModules; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.ComponentTemplate; import org.elasticsearch.cluster.metadata.Metadata; +import org.elasticsearch.cluster.metadata.Template; import org.elasticsearch.cluster.routing.allocation.DataTier; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.deprecation.DeprecationIssue; import org.elasticsearch.xpack.core.ilm.LifecycleSettings; +import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.HashMap; import java.util.List; 
import java.util.Map; import java.util.stream.Collectors; @@ -832,4 +838,42 @@ public void testCheckNodeAttrData() { ); assertThat(issues, hasItem(expected)); } + + public void testCheckSourceModeInComponentTemplates() throws IOException { + Template template = Template.builder().mappings(CompressedXContent.fromJSON(""" + { "_doc": { "_source": { "mode": "stored"} } }""")).build(); + ComponentTemplate componentTemplate = new ComponentTemplate(template, 1L, new HashMap<>()); + + Template template2 = Template.builder().mappings(CompressedXContent.fromJSON(""" + { "_doc": { "_source": { "enabled": false} } }""")).build(); + ComponentTemplate componentTemplate2 = new ComponentTemplate(template2, 1L, new HashMap<>()); + + ClusterState clusterState = ClusterState.builder(ClusterState.EMPTY_STATE) + .metadata( + Metadata.builder() + .componentTemplates( + Map.of("my-template-1", componentTemplate, "my-template-2", componentTemplate, "my-template-3", componentTemplate2) + ) + ) + .build(); + + final List issues = DeprecationChecks.filterChecks( + DeprecationChecks.NODE_SETTINGS_CHECKS, + c -> c.apply( + Settings.EMPTY, + new PluginsAndModules(Collections.emptyList(), Collections.emptyList()), + clusterState, + new XPackLicenseState(() -> 0) + ) + ); + final DeprecationIssue expected = new DeprecationIssue( + DeprecationIssue.Level.CRITICAL, + SourceFieldMapper.DEPRECATION_WARNING, + "https://github.com/elastic/elasticsearch/pull/117172", + SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [my-template-1, my-template-2]", + false, + null + ); + assertThat(issues, hasItem(expected)); + } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java index 114a26bbe83f0..ddcad949b6a79 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java +++ 
b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/AbstractEnrichProcessor.java @@ -90,7 +90,7 @@ public void execute(IngestDocument ingestDocument, BiConsumer * The key of the cache is based on the search request and the enrich index that will be used. * Search requests that enrich generates target the alias for an enrich policy, this class * resolves the alias to the actual enrich index and uses that for the cache key. This way * no stale entries will be returned if a policy execution happens and a new enrich index is created. - * + *

    * There is no cleanup mechanism of stale entries in case a new enrich index is created * as part of a policy execution. This shouldn't be needed as cache entries for prior enrich * indices will be eventually evicted, because these entries will not end up being used. The diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java index b091ec9b94752..dd164c630495c 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/GeoMatchProcessor.java @@ -40,7 +40,7 @@ public final class GeoMatchProcessor extends AbstractEnrichProcessor { ) { super(tag, description, searchRunner, policyName, field, targetField, ignoreMissing, overrideEnabled, matchField, maxMatches); this.shapeRelation = shapeRelation; - parser = new GeometryParser(orientation.getAsBoolean(), true, true); + this.parser = new GeometryParser(orientation.getAsBoolean(), true, true); } @Override @@ -50,8 +50,4 @@ public QueryBuilder getQueryBuilder(Object fieldValue) { shapeQuery.relation(shapeRelation); return shapeQuery; } - - public ShapeRelation getShapeRelation() { - return shapeRelation; - } } diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java index 40356f2824494..4fb12bb5ca3c7 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichCoordinatorProxyAction.java @@ -177,7 +177,7 @@ void coordinateLookups() { assert slots.isEmpty() == false; remoteRequestsTotal.increment(); final MultiSearchRequest multiSearchRequest = new 
MultiSearchRequest(); - slots.forEach(slot -> multiSearchRequest.add(slot.searchRequest)); + slots.forEach(slot -> multiSearchRequest.add(slot.request)); lookupFunction.accept(multiSearchRequest, (response, e) -> handleResponse(slots, response, e)); } } @@ -193,13 +193,13 @@ void handleResponse(List slots, MultiSearchResponse response, Exception e) Slot slot = slots.get(i); if (responseItem.isFailure()) { - slot.actionListener.onFailure(responseItem.getFailure()); + slot.listener.onFailure(responseItem.getFailure()); } else { - slot.actionListener.onResponse(responseItem.getResponse()); + slot.listener.onResponse(responseItem.getResponse()); } } } else if (e != null) { - slots.forEach(slot -> slot.actionListener.onFailure(e)); + slots.forEach(slot -> slot.listener.onFailure(e)); } else { throw new AssertionError("no response and no error"); } @@ -208,14 +208,10 @@ void handleResponse(List slots, MultiSearchResponse response, Exception e) coordinateLookups(); } - static class Slot { - - final SearchRequest searchRequest; - final ActionListener actionListener; - - Slot(SearchRequest searchRequest, ActionListener actionListener) { - this.searchRequest = Objects.requireNonNull(searchRequest); - this.actionListener = Objects.requireNonNull(actionListener); + record Slot(SearchRequest request, ActionListener listener) { + Slot { + Objects.requireNonNull(request); + Objects.requireNonNull(listener); } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EntryExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EntryExpression.java new file mode 100644 index 0000000000000..e6f05e95a0757 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/EntryExpression.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.core.expression; + +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; + +import java.io.IOException; +import java.util.List; +import java.util.Objects; + +/** + * Represent a key-value pair. + */ +public class EntryExpression extends Expression { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "EntryExpression", + EntryExpression::readFrom + ); + + private final Expression key; + + private final Expression value; + + public EntryExpression(Source source, Expression key, Expression value) { + super(source, List.of(key, value)); + this.key = key; + this.value = value; + } + + private static EntryExpression readFrom(StreamInput in) throws IOException { + return new EntryExpression( + Source.readFrom((StreamInput & PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(key); + out.writeNamedWriteable(value); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public Expression replaceChildren(List newChildren) { + return new EntryExpression(source(), newChildren.get(0), newChildren.get(1)); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, EntryExpression::new, key, value); + } + + public Expression key() { + return key; + } + + 
public Expression value() { + return value; + } + + @Override + public DataType dataType() { + return value.dataType(); + } + + @Override + public Nullability nullable() { + return Nullability.FALSE; + } + + @Override + public int hashCode() { + return Objects.hash(key, value); + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + EntryExpression other = (EntryExpression) obj; + return Objects.equals(key, other.key) && Objects.equals(value, other.value); + } + + @Override + public String toString() { + return key.toString() + ":" + value.toString(); + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java index 174a0321a3057..3ea37f88d80b2 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/ExpressionCoreWritables.java @@ -29,6 +29,7 @@ public static List expressions() { entries.add(new NamedWriteableRegistry.Entry(Expression.class, e.name, in -> (Expression) e.reader.read(in))); } entries.add(Literal.ENTRY); + entries.addAll(mapExpressions()); return entries; } @@ -45,4 +46,8 @@ public static List namedExpressions() { public static List attributes() { return List.of(FieldAttribute.ENTRY, MetadataAttribute.ENTRY, ReferenceAttribute.ENTRY); } + + public static List mapExpressions() { + return List.of(EntryExpression.ENTRY, MapExpression.ENTRY); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java new file mode 100644 index 
0000000000000..861ecc4ca0368 --- /dev/null +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/MapExpression.java @@ -0,0 +1,144 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.xpack.esql.core.expression; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.stream.Collectors; + +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSUPPORTED; + +/** + * Represent a collect of key-value pairs. 
+ */ +public class MapExpression extends Expression { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "MapExpression", + MapExpression::readFrom + ); + + private final List entryExpressions; + + private final Map map; + + private final Map keyFoldedMap; + + public MapExpression(Source source, List entries) { + super(source, entries); + int entryCount = entries.size() / 2; + this.entryExpressions = new ArrayList<>(entryCount); + this.map = new LinkedHashMap<>(entryCount); + // create a map with key folded and source removed to make the retrieval of value easier + this.keyFoldedMap = new LinkedHashMap<>(entryCount); + for (int i = 0; i < entryCount; i++) { + Expression key = entries.get(i * 2); + Expression value = entries.get(i * 2 + 1); + entryExpressions.add(new EntryExpression(key.source(), key, value)); + map.put(key, value); + if (key instanceof Literal l) { + this.keyFoldedMap.put(l.value(), value); + } + } + } + + private static MapExpression readFrom(StreamInput in) throws IOException { + return new MapExpression( + Source.readFrom((StreamInput & PlanStreamInput) in), + in.readNamedWriteableCollectionAsList(Expression.class) + ); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteableCollection(children()); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + public MapExpression replaceChildren(List newChildren) { + return new MapExpression(source(), newChildren); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, MapExpression::new, children()); + } + + public List entryExpressions() { + return entryExpressions; + } + + public Map map() { + return map; + } + + public Map keyFoldedMap() { + return keyFoldedMap; + } + + @Override + public Nullability nullable() { + return Nullability.FALSE; + } + + @Override + public DataType dataType() { + return 
UNSUPPORTED; + } + + @Override + public int hashCode() { + return Objects.hash(entryExpressions); + } + + public Expression get(Object key) { + if (key instanceof Expression) { + return map.get(key); + } else { + // the key(literal) could be converted to BytesRef by ConvertStringToByteRef + return keyFoldedMap.containsKey(key) ? keyFoldedMap.get(key) : keyFoldedMap.get(new BytesRef(key.toString())); + } + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + + MapExpression other = (MapExpression) obj; + return Objects.equals(entryExpressions, other.entryExpressions); + } + + @Override + public String toString() { + String str = entryExpressions.stream().map(String::valueOf).collect(Collectors.joining(", ")); + return "{ " + str + " }"; + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java deleted file mode 100644 index b1ac2b36314fa..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TranslationAware.java +++ /dev/null @@ -1,20 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.expression; - -import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; - -/** - * Expressions can implement this interface to control how they would be translated and pushed down as Lucene queries. 
- * When an expression implements {@link TranslationAware}, we call {@link #asQuery(TranslatorHandler)} to get the - * {@link Query} translation, instead of relying on the registered translators from EsqlExpressionTranslators. - */ -public interface TranslationAware { - Query asQuery(TranslatorHandler translatorHandler); -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java index 842f3c0ddadd7..e9d4aacfe9fa7 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java +++ b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/TypeResolutions.java @@ -253,4 +253,19 @@ private static String acceptedTypesForErrorMsg(String... acceptedTypes) { return acceptedTypes[0]; } } + + public static TypeResolution isMapExpression(Expression e, String operationName, ParamOrdinal paramOrd) { + if (e instanceof MapExpression == false) { + return new TypeResolution( + format( + null, + "{}argument of [{}] must be a map expression, received [{}]", + paramOrd == null || paramOrd == DEFAULT ? "" : paramOrd.name().toLowerCase(Locale.ROOT) + " ", + operationName, + Expressions.name(e) + ) + ); + } + return TypeResolution.TYPE_RESOLVED; + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java deleted file mode 100644 index 210e8265dcfe9..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogic.java +++ /dev/null @@ -1,65 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. 
Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; - -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; -import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; - -import java.io.IOException; - -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isBoolean; - -public abstract class BinaryLogic extends BinaryOperator { - - protected BinaryLogic(Source source, Expression left, Expression right, BinaryLogicOperation operation) { - super(source, left, right, operation); - } - - protected BinaryLogic(StreamInput in, BinaryLogicOperation op) throws IOException { - this( - Source.readFrom((StreamInput & PlanStreamInput) in), - in.readNamedWriteable(Expression.class), - in.readNamedWriteable(Expression.class), - op - ); - } - - @Override - public final void writeTo(StreamOutput out) throws IOException { - Source.EMPTY.writeTo(out); - out.writeNamedWriteable(left()); - out.writeNamedWriteable(right()); - } - - @Override - public DataType dataType() { - return DataType.BOOLEAN; - } - - @Override - protected TypeResolution resolveInputType(Expression e, ParamOrdinal paramOrdinal) { - return isBoolean(e, sourceText(), paramOrdinal); - } - - @Override - public Nullability nullable() { - // Cannot fold null due to 3vl, constant folding will do any possible folding. 
- return Nullability.UNKNOWN; - } - - @Override - protected boolean isCommutative() { - return true; - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslator.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslator.java deleted file mode 100644 index db148e2d63fa1..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslator.java +++ /dev/null @@ -1,48 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.planner; - -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.util.Check; -import org.elasticsearch.xpack.esql.core.util.ReflectionUtils; - -public abstract class ExpressionTranslator { - - private final Class typeToken = ReflectionUtils.detectSuperTypeForRuleLike(getClass()); - - @SuppressWarnings("unchecked") - public Query translate(Expression exp, TranslatorHandler handler) { - return (typeToken.isInstance(exp) ? 
asQuery((E) exp, handler) : null); - } - - protected abstract Query asQuery(E e, TranslatorHandler handler); - - public static FieldAttribute checkIsFieldAttribute(Expression e) { - Check.isTrue(e instanceof FieldAttribute, "Expected a FieldAttribute but received [{}]", e); - return (FieldAttribute) e; - } - - public static TypedAttribute checkIsPushableAttribute(Expression e) { - Check.isTrue( - e instanceof FieldAttribute || e instanceof MetadataAttribute, - "Expected a FieldAttribute or MetadataAttribute but received [{}]", - e - ); - return (TypedAttribute) e; - } - - public static String pushableAttributeName(TypedAttribute attribute) { - return attribute instanceof FieldAttribute fa - ? fa.exactAttribute().name() // equality should always be against an exact match (which is important for strings) - : attribute.name(); - } -} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java deleted file mode 100644 index e0f4f6b032662..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/ExpressionTranslators.java +++ /dev/null @@ -1,163 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.core.planner; - -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; -import org.elasticsearch.xpack.esql.core.querydsl.query.BoolQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.ExistsQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.util.Check; -import org.elasticsearch.xpack.esql.core.util.CollectionUtils; - -import java.util.Arrays; -import java.util.List; - -public final class ExpressionTranslators { - - // TODO: see whether escaping is needed - @SuppressWarnings("rawtypes") - public static class Likes extends ExpressionTranslator { - - @Override - protected Query asQuery(RegexMatch e, TranslatorHandler handler) { - return doTranslate(e, handler); - } - - public static Query doTranslate(RegexMatch e, TranslatorHandler handler) { - Query q; - 
Expression field = e.field(); - - if (field instanceof FieldAttribute fa) { - return handler.wrapFunctionQuery(e, fa, () -> translateField(e, handler.nameOf(fa.exactAttribute()))); - } else if (field instanceof MetadataAttribute ma) { - q = translateField(e, handler.nameOf(ma)); - } else { - throw new QlIllegalArgumentException("Cannot translate query for " + e); - } - - return q; - } - - private static Query translateField(RegexMatch e, String targetFieldName) { - if (e instanceof WildcardLike l) { - return new WildcardQuery(e.source(), targetFieldName, l.pattern().asLuceneWildcard(), l.caseInsensitive()); - } - if (e instanceof RLike rl) { - return new RegexQuery(e.source(), targetFieldName, rl.pattern().asJavaRegex(), rl.caseInsensitive()); - } - return null; - } - } - - public static class BinaryLogic extends ExpressionTranslator< - org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic> { - - @Override - protected Query asQuery(org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic e, TranslatorHandler handler) { - if (e instanceof And) { - return and(e.source(), handler.asQuery(e.left()), handler.asQuery(e.right())); - } - if (e instanceof Or) { - return or(e.source(), handler.asQuery(e.left()), handler.asQuery(e.right())); - } - - return null; - } - } - - public static class Nots extends ExpressionTranslator { - - @Override - protected Query asQuery(Not not, TranslatorHandler handler) { - return doTranslate(not, handler); - } - - public static Query doTranslate(Not not, TranslatorHandler handler) { - return handler.asQuery(not.field()).negate(not.source()); - } - } - - public static class IsNotNulls extends ExpressionTranslator { - - @Override - protected Query asQuery(IsNotNull isNotNull, TranslatorHandler handler) { - return doTranslate(isNotNull, handler); - } - - public static Query doTranslate(IsNotNull isNotNull, TranslatorHandler handler) { - return handler.wrapFunctionQuery(isNotNull, isNotNull.field(), () -> 
translate(isNotNull, handler)); - } - - private static Query translate(IsNotNull isNotNull, TranslatorHandler handler) { - return new ExistsQuery(isNotNull.source(), handler.nameOf(isNotNull.field())); - } - } - - public static class IsNulls extends ExpressionTranslator { - - @Override - protected Query asQuery(IsNull isNull, TranslatorHandler handler) { - return doTranslate(isNull, handler); - } - - public static Query doTranslate(IsNull isNull, TranslatorHandler handler) { - return handler.wrapFunctionQuery(isNull, isNull.field(), () -> translate(isNull, handler)); - } - - private static Query translate(IsNull isNull, TranslatorHandler handler) { - return new NotQuery(isNull.source(), new ExistsQuery(isNull.source(), handler.nameOf(isNull.field()))); - } - } - - public static Query or(Source source, Query left, Query right) { - return boolQuery(source, left, right, false); - } - - private static Query and(Source source, Query left, Query right) { - return boolQuery(source, left, right, true); - } - - private static Query boolQuery(Source source, Query left, Query right, boolean isAnd) { - Check.isTrue(left != null || right != null, "Both expressions are null"); - if (left == null) { - return right; - } - if (right == null) { - return left; - } - List queries; - // check if either side is already a bool query to an extra bool query - if (left instanceof BoolQuery leftBool && leftBool.isAnd() == isAnd) { - if (right instanceof BoolQuery rightBool && rightBool.isAnd() == isAnd) { - queries = CollectionUtils.combine(leftBool.queries(), rightBool.queries()); - } else { - queries = CollectionUtils.combine(leftBool.queries(), right); - } - } else if (right instanceof BoolQuery bool && bool.isAnd() == isAnd) { - queries = CollectionUtils.combine(bool.queries(), left); - } else { - queries = Arrays.asList(left, right); - } - return new BoolQuery(source, isAnd, queries); - } -} diff --git 
a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/TranslatorHandler.java b/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/TranslatorHandler.java deleted file mode 100644 index b85544905595a..0000000000000 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/planner/TranslatorHandler.java +++ /dev/null @@ -1,36 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.core.planner; - -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; - -import java.util.function.Supplier; - -/** - * Parameterized handler used during query translation. - * - * Provides contextual utilities for an individual query to be performed. 
- */ -public interface TranslatorHandler { - - Query asQuery(Expression e); - - default Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { - if (field instanceof FieldAttribute) { - return querySupplier.get(); - } - throw new QlIllegalArgumentException("Cannot translate expression:[" + sf.sourceText() + "]"); - } - - String nameOf(Expression e); - -} diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/util/TestUtils.java b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/util/TestUtils.java index 40321fddebdfe..34a477c70c504 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/util/TestUtils.java +++ b/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/util/TestUtils.java @@ -7,10 +7,8 @@ package org.elasticsearch.xpack.esql.core.util; -import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -21,7 +19,6 @@ import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static org.elasticsearch.test.ESTestCase.randomBoolean; import static org.elasticsearch.test.ESTestCase.randomFrom; -import static org.elasticsearch.test.ESTestCase.randomZone; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.type.DataType.INTEGER; @@ -44,10 +41,6 @@ public static Literal of(Source source, Object value) { return new Literal(source, value, DataType.fromJava(value)); } - public static Range rangeOf(Expression value, Expression lower, boolean includeLower, Expression upper, boolean includeUpper) { - 
return new Range(EMPTY, value, lower, includeLower, upper, includeUpper, randomZone()); - } - public static FieldAttribute fieldAttribute() { return fieldAttribute(randomAlphaOfLength(10), randomFrom(DataType.types())); } diff --git a/x-pack/plugin/esql/build.gradle b/x-pack/plugin/esql/build.gradle index d7fa786a24e7c..8d2050fb43044 100644 --- a/x-pack/plugin/esql/build.gradle +++ b/x-pack/plugin/esql/build.gradle @@ -58,6 +58,7 @@ dependencies { testImplementation project(path: ':modules:analysis-common') testImplementation project(path: ':modules:ingest-common') testImplementation project(path: ':modules:legacy-geo') + testImplementation project(xpackModule('esql:compute:test')) testImplementation('net.nextencia:rrdiagram:0.9.4') testImplementation('org.webjars.npm:fontsource__roboto-mono:4.5.7') diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 8e866cec3f421..1a349f6b8424e 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -19,6 +19,14 @@ dependencies { testImplementation(project(':modules:analysis-common')) testImplementation(project(':test:framework')) testImplementation(project(xpackModule('esql-core'))) + testImplementation(project(xpackModule('esql:compute:test'))) { + /* + * compute:test depends on this project's main code which is fine + * but we don't need to pull in the jar because we already have + * it being, you know, in this project.... 
+ */ + transitive = false + } testImplementation(project(xpackModule('core'))) testImplementation(project(xpackModule('ml'))) } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java index b4e74d52ffeb8..5d2de94c86651 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java @@ -39,6 +39,7 @@ public Set getSupportedAnnotationTypes() { "org.elasticsearch.injection.guice.Inject", "org.elasticsearch.xpack.esql.expression.function.FunctionInfo", "org.elasticsearch.xpack.esql.expression.function.Param", + "org.elasticsearch.xpack.esql.expression.function.MapParam", "org.elasticsearch.rest.ServerlessScope", "org.elasticsearch.xcontent.ParserConstructor", "org.elasticsearch.core.UpdateForV9", diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java index 8b952ee0d951a..c84029b4ceeb4 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorFixedBuilder.java @@ -15,7 +15,7 @@ * it's faster. * This class is generated. Do not edit it. 
*/ -final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { +public final class BooleanVectorFixedBuilder implements BooleanVector.FixedBuilder { private final BlockFactory blockFactory; private final boolean[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ public void close() { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java index ff363b36e44b1..e84040578acf7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorFixedBuilder.java @@ -15,7 +15,7 @@ * it's faster. * This class is generated. Do not edit it. */ -final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { +public final class DoubleVectorFixedBuilder implements DoubleVector.FixedBuilder { private final BlockFactory blockFactory; private final double[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ public void close() { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java index b8d8c48823720..9c4f2b3986c7e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/FloatVectorFixedBuilder.java @@ -15,7 +15,7 @@ * it's faster. * This class is generated. Do not edit it. 
*/ -final class FloatVectorFixedBuilder implements FloatVector.FixedBuilder { +public final class FloatVectorFixedBuilder implements FloatVector.FixedBuilder { private final BlockFactory blockFactory; private final float[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ public void close() { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java index 9ab01d019252a..6af564735c073 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorFixedBuilder.java @@ -15,7 +15,7 @@ * it's faster. * This class is generated. Do not edit it. */ -final class IntVectorFixedBuilder implements IntVector.FixedBuilder { +public final class IntVectorFixedBuilder implements IntVector.FixedBuilder { private final BlockFactory blockFactory; private final int[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ public void close() { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java index 77dd0a87dfb2f..33cf0e5dc82e2 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorFixedBuilder.java @@ -15,7 +15,7 @@ * it's faster. * This class is generated. Do not edit it. 
*/ -final class LongVectorFixedBuilder implements LongVector.FixedBuilder { +public final class LongVectorFixedBuilder implements LongVector.FixedBuilder { private final BlockFactory blockFactory; private final long[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ public void close() { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java index 9ec726c6b250d..4755b1d609cfb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlockBuilder.java @@ -12,7 +12,7 @@ import java.util.BitSet; import java.util.stream.IntStream; -abstract class AbstractBlockBuilder implements Block.Builder { +public abstract class AbstractBlockBuilder implements Block.Builder { protected final BlockFactory blockFactory; @@ -34,7 +34,7 @@ abstract class AbstractBlockBuilder implements Block.Builder { /** The number of bytes currently estimated with the breaker. 
*/ protected long estimatedBytes; - boolean closed = false; + private boolean closed = false; protected AbstractBlockBuilder(BlockFactory blockFactory) { this.blockFactory = blockFactory; @@ -183,4 +183,8 @@ private void setFirstValue(int position, int value) { } firstValueIndexes[position] = value; } + + public boolean isReleased() { + return closed; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java index 7ee4ff2441f4e..b99404429a51b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVectorBuilder.java @@ -7,13 +7,13 @@ package org.elasticsearch.compute.data; -abstract class AbstractVectorBuilder implements Vector.Builder { +public abstract class AbstractVectorBuilder implements Vector.Builder { protected int valueCount; /** * Has this builder been closed already? */ - boolean closed = false; + private boolean closed = false; protected final BlockFactory blockFactory; @@ -90,4 +90,8 @@ public final void close() { * Called when first {@link #close() closed}. */ protected void extraClose() {} + + public boolean isReleased() { + return closed; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 155898ebdc6c8..f66ae42106ca2 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -81,7 +81,7 @@ public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { * be adjusted without tripping. 
* @throws CircuitBreakingException if the breaker was put above its limit */ - void adjustBreaker(final long delta) throws CircuitBreakingException { + public void adjustBreaker(final long delta) throws CircuitBreakingException { // checking breaker means potentially tripping, but it doesn't // have to if the delta is negative if (delta > 0) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st index a8876c5120090..c08478829c818 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorFixedBuilder.java.st @@ -15,7 +15,7 @@ import org.apache.lucene.util.RamUsageEstimator; * it's faster. * This class is generated. Do not edit it. */ -final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { +public final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { private final BlockFactory blockFactory; private final $type$[] values; private final long preAdjustedBytes; @@ -84,7 +84,7 @@ final class $Type$VectorFixedBuilder implements $Type$Vector.FixedBuilder { } } - boolean isReleased() { + public boolean isReleased() { return closed; } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java index e6ef10e53ec7c..a5061b8cf6d32 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/OperatorTests.java @@ -37,7 +37,6 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import 
org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; @@ -57,12 +56,13 @@ import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LimitOperator; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.RowInTableLookupOperator; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.ShuffleDocsOperator; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Releasables; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -84,7 +84,7 @@ import static org.elasticsearch.compute.aggregation.AggregatorMode.FINAL; import static org.elasticsearch.compute.aggregation.AggregatorMode.INITIAL; -import static org.elasticsearch.compute.operator.OperatorTestCase.randomPageSize; +import static org.elasticsearch.compute.test.OperatorTestCase.randomPageSize; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java index e2c9c255b67bd..3eaf85c27e596 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/AggregatorFunctionTestCase.java @@ -11,7 +11,6 @@ import org.elasticsearch.compute.ConstantBooleanExpressionEvaluator; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; @@ -21,17 +20,18 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.AddGarbageRowsSourceOperator; import org.elasticsearch.compute.operator.AggregationOperator; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.ForkingOperatorTestCase; import org.elasticsearch.compute.operator.NullInsertingSourceOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.PositionMergingSourceOperator; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.hamcrest.Matcher; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ArrayStateTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ArrayStateTests.java index 634dc4f7c7ed7..f736569a116e1 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ArrayStateTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ArrayStateTests.java @@ -15,12 +15,12 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.type.DataType; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java index a81ce65c5360b..452fa206a5590 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountAggregatorFunctionTests.java @@ -7,16 +7,16 @@ package org.elasticsearch.compute.aggregation; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; +import 
static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.equalTo; public class CountAggregatorFunctionTests extends AggregatorFunctionTestCase { @@ -44,6 +44,6 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java index 84ab9d787aec1..1c0f3c4f64cb5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunctionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.aggregation; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; @@ -17,6 +16,7 @@ import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.equalTo; public class CountDistinctBooleanAggregatorFunctionTests extends AggregatorFunctionTestCase { @@ -45,6 +45,6 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 
1), equalTo(List.of(List.of(0L)))); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java index 8872e65054fa9..e8e51c2adf291 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunctionTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.compute.aggregation; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; @@ -18,6 +17,7 @@ import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -54,6 +54,6 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java index 9b6bc6be23e62..a3e7a6a6d70f5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunctionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.aggregation; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; @@ -18,6 +17,7 @@ import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -51,6 +51,6 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java index 7f520de393d73..bbd61455a3053 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctFloatAggregatorFunctionTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.compute.aggregation; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; @@ -18,6 +17,7 @@ import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static 
org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -51,6 +51,6 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java index 7bfbb2f70a5a4..5bd9ecc931cf2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunctionTests.java @@ -8,21 +8,21 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.closeTo; import static 
org.hamcrest.Matchers.equalTo; @@ -57,7 +57,7 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } public void testRejectsDouble() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java index 4df611a41a8dc..70662efae688f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunctionTests.java @@ -8,21 +8,21 @@ package org.elasticsearch.compute.aggregation; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.PageConsumerOperator; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static 
org.hamcrest.Matchers.closeTo; import static org.hamcrest.Matchers.equalTo; @@ -58,7 +58,7 @@ protected void assertSimpleOutput(List input, Block result) { @Override protected void assertOutputFromEmpty(Block b) { assertThat(b.getPositionCount(), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); + assertThat(valuesAtPositions(b, 0, 1), equalTo(List.of(List.of(0L)))); } public void testRejectsDouble() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java index 58925a5ca36fc..ff96336dc0bb4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/GroupingAggregatorFunctionTestCase.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -25,9 +24,7 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.AddGarbageRowsSourceOperator; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.ForkingOperatorTestCase; import org.elasticsearch.compute.operator.HashAggregationOperator; @@ -35,6 +32,9 @@ import 
org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.PositionMergingSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -51,7 +51,7 @@ import java.util.stream.Stream; import static java.util.stream.IntStream.range; -import static org.elasticsearch.compute.data.BlockTestUtils.append; +import static org.elasticsearch.compute.test.BlockTestUtils.append; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java index 8b9c9c1c39b8b..27a6fb0660461 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunctionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java index a3db32955d28c..0f570adfc6fd8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunctionTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.Arrays; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java index 00920be73117a..2ce7aab455c53 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/MinLongAggregatorFunctionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java index 46cc40084b6e6..664fe1edc6ad9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunctionTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.search.aggregations.metrics.TDigestState; import org.junit.Before; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java index 3f292ff0f81a6..003dc415c6194 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunctionTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SequenceDoubleBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java index d365f02d289c8..521c1e261cc62 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumFloatAggregatorFunctionTests.java @@ -15,7 +15,7 @@ import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.SequenceFloatBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java index cf364943e1d11..8c5e4430128b7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumIntAggregatorFunctionTests.java @@ -12,12 +12,12 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SequenceIntBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; import java.util.List; import java.util.stream.LongStream; 
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java index 7fd3cabb2c91e..00cdbedef54d6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/SumLongAggregatorFunctionTests.java @@ -12,12 +12,12 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.PageConsumerOperator; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java index 7ec1fb9c53053..cb42be67844dc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/TopLongAggregatorFunctionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; -import 
org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.List; import java.util.stream.LongStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java index 4b01603b3768d..933058d8d8e13 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/ValuesLongAggregatorFunctionTests.java @@ -10,8 +10,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import java.util.Arrays; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java index 42ac4cf2ff917..990827b3dc693 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashRandomizedTests.java @@ -17,18 +17,18 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BasicBlockTests; 
import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeTests; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.MockBlockFactory; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -49,6 +49,7 @@ import java.util.TreeSet; import java.util.stream.Stream; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.hamcrest.Matchers.equalTo; @@ -118,7 +119,7 @@ interface Type { /** * Build a random {@link Block}. 
*/ - BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups); + RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups); } private final boolean forcePackedHash; @@ -163,7 +164,7 @@ public void testWithCranky() { private void test(MockBlockFactory blockFactory) { List types = randomList(groups, groups, () -> randomFrom(allowedTypes)); List elementTypes = types.stream().map(Type::elementType).toList(); - BasicBlockTests.RandomBlock[] randomBlocks = new BasicBlockTests.RandomBlock[types.size()]; + RandomBlock[] randomBlocks = new RandomBlock[types.size()]; Block[] blocks = new Block[types.size()]; int pageCount = between(1, groups < 10 ? 10 : 5); int positionCount = 100; @@ -216,7 +217,7 @@ private void test(MockBlockFactory blockFactory) { if (keyBlock.isNull(p)) { key.add(null); } else { - key.add(BasicBlockTests.valuesAtPositions(keyBlock, p, p + 1).get(0).get(0)); + key.add(valuesAtPositions(keyBlock, p, p + 1).get(0).get(0)); assertThat(keyBlock.getValueCount(p), equalTo(1)); } } @@ -322,7 +323,7 @@ private static List readKey(Block[] keyBlocks, int position) { List key = new ArrayList<>(keyBlocks.length); for (Block block : keyBlocks) { assertThat(block.getValueCount(position), lessThan(2)); - List v = BasicBlockTests.valuesAtPositions(block, position, position + 1).get(0); + List v = valuesAtPositions(block, position, position + 1).get(0); key.add(v == null ? null : v.get(0)); } return key; @@ -351,7 +352,7 @@ private static List> readKeys(Block[] keyBlocks, int position) { List> keys = new ArrayList<>(); keys.add(List.of()); for (Block block : keyBlocks) { - List values = BasicBlockTests.valuesAtPositions(block, position, position + 1).get(0); + List values = valuesAtPositions(block, position, position + 1).get(0); List> newKeys = new ArrayList<>(); for (Object v : values == null ? 
Collections.singletonList(null) : values) { for (List k : keys) { @@ -400,18 +401,18 @@ private Oracle(boolean collectsNullLongs) { this.collectsNullLongs = collectsNullLongs; } - void add(BasicBlockTests.RandomBlock[] randomBlocks) { + void add(RandomBlock[] randomBlocks) { for (int p = 0; p < randomBlocks[0].block().getPositionCount(); p++) { add(randomBlocks, p, List.of()); } } - void add(BasicBlockTests.RandomBlock[] randomBlocks, int p, List key) { + void add(RandomBlock[] randomBlocks, int p, List key) { if (key.size() == randomBlocks.length) { keys.add(key); return; } - BasicBlockTests.RandomBlock block = randomBlocks[key.size()]; + RandomBlock block = randomBlocks[key.size()]; List values = block.values().get(p); if (values == null) { if (block.block().elementType() != ElementType.LONG || collectsNullLongs) { @@ -454,8 +455,8 @@ public static Object randomKeyElement(ElementType type) { private record Basic(ElementType elementType) implements Type { @Override - public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { - return BasicBlockTests.randomBlock( + public RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { + return RandomBlock.randomBlock( elementType, positionCount, elementType == ElementType.NULL | randomBoolean(), @@ -474,7 +475,7 @@ public ElementType elementType() { } @Override - public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { + public RandomBlock randomBlock(int positionCount, int maxValuesPerPosition, int dups) { Map dictionary = new HashMap<>(); Set keys = dictionary(maxValuesPerPosition); List> values = new ArrayList<>(positionCount); @@ -510,7 +511,7 @@ public BasicBlockTests.RandomBlock randomBlock(int positionCount, int maxValuesP ordinals.endPositionEntry(); } } - return new BasicBlockTests.RandomBlock(values, new OrdinalBytesRefBlock(ordinals.build(), bytes.build())); + return new RandomBlock(values, new 
OrdinalBytesRefBlock(ordinals.build(), bytes.build())); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java index fa93c0aa1c375..5cd9120e05305 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTestCase.java @@ -12,7 +12,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java index ede2d68ca2367..431b82db322f5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/BlockHashTests.java @@ -25,7 +25,7 @@ import org.elasticsearch.compute.data.OrdinalBytesRefBlock; import org.elasticsearch.compute.data.OrdinalBytesRefVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java index 587deda650a23..914d29bb8ba25 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizeBlockHashTests.java @@ -30,12 +30,12 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -53,7 +53,7 @@ import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.elasticsearch.compute.operator.OperatorTestCase.runDriver; +import static org.elasticsearch.compute.test.OperatorTestCase.runDriver; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java index cfa023af3d18a..5f868f51f06e2 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/blockhash/CategorizePackedValuesBlockHashTests.java @@ -25,12 +25,12 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.HashAggregationOperator; import org.elasticsearch.compute.operator.LocalSourceOperator; import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -48,7 +48,7 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.compute.operator.OperatorTestCase.runDriver; +import static org.elasticsearch.compute.test.OperatorTestCase.runDriver; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupRandomizedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupRandomizedTests.java index ebd588283ac07..25fa920ca4f3c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupRandomizedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupRandomizedTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import 
org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.mvdedupe.MultivalueDedupeTests; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; @@ -42,7 +42,7 @@ import java.util.TreeSet; import java.util.stream.IntStream; -import static org.elasticsearch.compute.data.BlockTestUtils.append; +import static org.elasticsearch.compute.test.BlockTestUtils.append; import static org.hamcrest.Matchers.any; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupTests.java index c029f54c171cd..57a1d4abeb8af 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/aggregation/table/RowInTableLookupTests.java @@ -16,7 +16,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java index 33a294131c996..7192146939ec5 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicBlockTests.java @@ -18,13 +18,14 @@ import org.elasticsearch.common.util.LongArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.ReleasableIterator; import org.elasticsearch.core.Releasables; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; -import org.elasticsearch.geometry.Point; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; @@ -41,6 +42,7 @@ import java.util.stream.LongStream; import static java.util.Collections.singletonList; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.containsString; @@ -1184,189 +1186,6 @@ public void testToStringSmall() { } } - public static List> valuesAtPositions(Block block, int from, int to) { - List> result = new ArrayList<>(to - from); - for (int p = from; p < to; p++) { - if (block.isNull(p)) { - result.add(null); - continue; - } - int count = block.getValueCount(p); - List positionValues = new ArrayList<>(count); - int i = block.getFirstValueIndex(p); - for (int v = 0; v < count; v++) { - positionValues.add(switch (block.elementType()) { - case INT -> ((IntBlock) block).getInt(i++); - case LONG -> ((LongBlock) block).getLong(i++); - case FLOAT -> ((FloatBlock) block).getFloat(i++); - case DOUBLE -> ((DoubleBlock) 
block).getDouble(i++); - case BYTES_REF -> ((BytesRefBlock) block).getBytesRef(i++, new BytesRef()); - case BOOLEAN -> ((BooleanBlock) block).getBoolean(i++); - default -> throw new IllegalArgumentException("unsupported element type [" + block.elementType() + "]"); - }); - } - result.add(positionValues); - } - return result; - } - - public record RandomBlock(List> values, Block block) { - int valueCount() { - return values.stream().mapToInt(l -> l == null ? 0 : l.size()).sum(); - } - - /** - * Build a {@link RandomBlock} contain the values of two blocks, preserving the relative order. - */ - public BasicBlockTests.RandomBlock merge(BasicBlockTests.RandomBlock rhs) { - int estimatedSize = values().size() + rhs.values().size(); - int l = 0; - int r = 0; - List> mergedValues = new ArrayList<>(estimatedSize); - try (Block.Builder mergedBlock = block.elementType().newBlockBuilder(estimatedSize, block.blockFactory())) { - while (l < values.size() && r < rhs.values.size()) { - if (randomBoolean()) { - mergedValues.add(values.get(l)); - mergedBlock.copyFrom(block, l, l + 1); - l++; - } else { - mergedValues.add(rhs.values.get(r)); - mergedBlock.copyFrom(rhs.block, r, r + 1); - r++; - } - } - while (l < values.size()) { - mergedValues.add(values.get(l)); - mergedBlock.copyFrom(block, l, l + 1); - l++; - } - while (r < rhs.values.size()) { - mergedValues.add(rhs.values.get(r)); - mergedBlock.copyFrom(rhs.block, r, r + 1); - r++; - } - return new BasicBlockTests.RandomBlock(mergedValues, mergedBlock.build()); - } - } - - } - - public static RandomBlock randomBlock( - ElementType elementType, - int positionCount, - boolean nullAllowed, - int minValuesPerPosition, - int maxValuesPerPosition, - int minDupsPerPosition, - int maxDupsPerPosition - ) { - return randomBlock( - TestBlockFactory.getNonBreakingInstance(), - elementType, - positionCount, - nullAllowed, - minValuesPerPosition, - maxValuesPerPosition, - minDupsPerPosition, - maxDupsPerPosition - ); - } - - public static 
RandomBlock randomBlock( - BlockFactory blockFactory, - ElementType elementType, - int positionCount, - boolean nullAllowed, - int minValuesPerPosition, - int maxValuesPerPosition, - int minDupsPerPosition, - int maxDupsPerPosition - ) { - List> values = new ArrayList<>(); - Block.MvOrdering mvOrdering = Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; - try (var builder = elementType.newBlockBuilder(positionCount, blockFactory)) { - boolean bytesRefFromPoints = randomBoolean(); - Supplier pointSupplier = randomBoolean() ? GeometryTestUtils::randomPoint : ShapeTestUtils::randomPoint; - for (int p = 0; p < positionCount; p++) { - if (elementType == ElementType.NULL) { - assert nullAllowed; - values.add(null); - builder.appendNull(); - continue; - } - int valueCount = between(minValuesPerPosition, maxValuesPerPosition); - if (valueCount == 0 || nullAllowed && randomBoolean()) { - values.add(null); - builder.appendNull(); - continue; - } - int dupCount = between(minDupsPerPosition, maxDupsPerPosition); - if (valueCount != 1 || dupCount != 0) { - builder.beginPositionEntry(); - } - List valuesAtPosition = new ArrayList<>(); - values.add(valuesAtPosition); - for (int v = 0; v < valueCount; v++) { - switch (elementType) { - case INT -> { - int i = randomInt(); - valuesAtPosition.add(i); - ((IntBlock.Builder) builder).appendInt(i); - } - case LONG -> { - long l = randomLong(); - valuesAtPosition.add(l); - ((LongBlock.Builder) builder).appendLong(l); - } - case FLOAT -> { - float f = randomFloat(); - valuesAtPosition.add(f); - ((FloatBlock.Builder) builder).appendFloat(f); - } - case DOUBLE -> { - double d = randomDouble(); - valuesAtPosition.add(d); - ((DoubleBlock.Builder) builder).appendDouble(d); - } - case BYTES_REF -> { - BytesRef b = bytesRefFromPoints - ? 
GEO.asWkb(pointSupplier.get()) - : new BytesRef(randomRealisticUnicodeOfLength(4)); - valuesAtPosition.add(b); - ((BytesRefBlock.Builder) builder).appendBytesRef(b); - } - case BOOLEAN -> { - boolean b = randomBoolean(); - valuesAtPosition.add(b); - ((BooleanBlock.Builder) builder).appendBoolean(b); - } - default -> throw new IllegalArgumentException("unsupported element type [" + elementType + "]"); - } - } - for (int i = 0; i < dupCount; i++) { - BlockTestUtils.append(builder, randomFrom(valuesAtPosition)); - } - if (valueCount != 1 || dupCount != 0) { - builder.endPositionEntry(); - } - if (dupCount > 0) { - mvOrdering = Block.MvOrdering.UNORDERED; - } else if (mvOrdering != Block.MvOrdering.UNORDERED) { - List dedupedAndSortedList = valuesAtPosition.stream().sorted().distinct().toList(); - if (dedupedAndSortedList.size() != valuesAtPosition.size()) { - mvOrdering = Block.MvOrdering.UNORDERED; - } else if (dedupedAndSortedList.equals(valuesAtPosition) == false) { - mvOrdering = Block.MvOrdering.DEDUPLICATED_UNORDERD; - } - } - } - if (randomBoolean()) { - builder.mvOrdering(mvOrdering); - } - return new RandomBlock(values, builder.build()); - } - } - interface BlockBuilderFactory { B create(int estimatedSize); } @@ -1963,14 +1782,4 @@ static BooleanVector randomMask(int positions) { return builder.build(); } } - - /** - * A random {@link ElementType} for which we can build a {@link RandomBlock}. 
- */ - public static ElementType randomElementType() { - return randomValueOtherThanMany( - e -> e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE, - () -> randomFrom(ElementType.values()) - ); - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java index e0cf277e99967..99b24d39fe8b1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java @@ -11,6 +11,7 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index 86bfec5120945..248dc23c09269 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.util.BigArray; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; import org.elasticsearch.core.Releasables; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java index dc12a78954c5e..679e3441fb45f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java @@ -10,6 +10,8 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -17,6 +19,7 @@ import java.util.stream.IntStream; import static org.elasticsearch.compute.data.BlockValueAsserter.assertBlockValues; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; public class BlockBuilderCopyFromTests extends ESTestCase { @ParametersFactory @@ -81,7 +84,7 @@ private void assertSmall(Block block) { BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); Block.Builder builder = elementType.newBlockBuilder(smallSize, blockFactory); builder.copyFrom(block, 0, smallSize); - assertBlockValues(builder.build(), BasicBlockTests.valuesAtPositions(block, 0, smallSize)); + assertBlockValues(builder.build(), valuesAtPositions(block, 0, smallSize)); } private void assertEvens(Block block) { @@ -90,15 +93,14 @@ private void assertEvens(Block block) { List> expected = new ArrayList<>(); for (int i = 0; i < block.getPositionCount(); i += 2) { builder.copyFrom(block, i, i + 1); - expected.add(BasicBlockTests.valuesAtPositions(block, i, i + 1).get(0)); + expected.add(valuesAtPositions(block, i, i + 1).get(0)); } assertBlockValues(builder.build(), expected); } private Block randomBlock() { int positionCount = randomIntBetween(1, 16 * 1024); - return BasicBlockTests.randomBlock(elementType, positionCount, nullAllowed, 
minValuesPerPosition, maxValuesPerPosition, 0, 0) - .block(); + return RandomBlock.randomBlock(elementType, positionCount, nullAllowed, minValuesPerPosition, maxValuesPerPosition, 0, 0).block(); } private Block randomFilteredBlock() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java index eb2c750e3b2d7..c4e30d6766724 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -118,7 +119,7 @@ public void testBuildSingle() { private void testBuild(int size, boolean nullable, int maxValueCount) { try (Block.Builder builder = elementType.newBlockBuilder(randomBoolean() ? 
size : 1, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, nullable, 1, maxValueCount, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, size, nullable, 1, maxValueCount, 0, 0); builder.copyFrom(random.block(), 0, random.block().getPositionCount()); assertThat( builder.estimatedBytes(), @@ -135,7 +136,7 @@ private void testBuild(int size, boolean nullable, int maxValueCount) { public void testDoubleBuild() { try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); builder.copyFrom(random.block(), 0, random.block().getPositionCount()); try (Block built = builder.build()) { assertThat(built, equalTo(random.block())); @@ -154,7 +155,7 @@ public void testCranky() { for (int i = 0; i < 100; i++) { try { try (Block.Builder builder = elementType.newBlockBuilder(10, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); builder.copyFrom(random.block(), 0, random.block().getPositionCount()); try (Block built = builder.build()) { assertThat(built, equalTo(random.block())); @@ -175,7 +176,7 @@ public void testCrankyConstantBlock() { for (int i = 0; i < 100; i++) { try { try (Block.Builder builder = elementType.newBlockBuilder(randomInt(10), blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 1, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 1, false, 1, 1, 0, 0); builder.copyFrom(random.block(), 0, random.block().getPositionCount()); try (Block built = builder.build()) { assertThat(built.asVector().isConstant(), is(true)); diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index 5d5eef1fe3c07..4a62136d9c8e4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.CircuitBreakerService; @@ -672,7 +673,7 @@ public void testParent() { } private Block randomBlock(BlockFactory blockFactory, int positionCount) { - return BasicBlockTests.randomBlock( + return RandomBlock.randomBlock( blockFactory, randomFrom(ElementType.BYTES_REF, ElementType.LONG, ElementType.BOOLEAN), positionCount, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java index da7b8cd87db7d..6d52dde9ad57f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockMultiValuedTests.java @@ -18,6 +18,9 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.MockBlockFactory; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.core.ReleasableIterator; import 
org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; @@ -30,6 +33,7 @@ import java.util.stream.IntStream; import static org.elasticsearch.compute.data.BasicBlockTests.assertInsertNulls; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.nullValue; @@ -59,7 +63,7 @@ public BlockMultiValuedTests(@Name("elementType") ElementType elementType, @Name public void testMultiValued() { int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); try { assertThat(b.block().getPositionCount(), equalTo(positionCount)); assertThat(b.block().getTotalValueCount(), equalTo(b.valueCount())); @@ -77,7 +81,7 @@ public void testMultiValued() { public void testExpand() { int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 100, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 100, 0, 0); assertExpanded(b.block()); } @@ -130,7 +134,7 @@ public void testToMask() { return; } int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 2, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 2, 10, 0, 0); try (ToMask mask = ((BooleanBlock) b.block()).toMask()) { assertThat(mask.hadMultivaluedFields(), equalTo(true)); for (int p = 0; p < b.values().size(); p++) { @@ -152,14 +156,14 @@ public void testToMask() { public void testMask() { int positionCount = randomIntBetween(1, 16 * 1024); - var b = 
BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); try ( BooleanVector mask = BasicBlockTests.randomMask(b.values().size() + between(0, 1000)); Block masked = b.block().keepMask(mask) ) { for (int p = 0; p < b.values().size(); p++) { List inputValues = b.values().get(p); - List valuesAtPosition = BasicBlockTests.valuesAtPositions(masked, p, p + 1).get(0); + List valuesAtPosition = valuesAtPositions(masked, p, p + 1).get(0); if (inputValues == null || mask.getBoolean(p) == false) { assertThat(masked.isNull(p), equalTo(true)); assertThat(valuesAtPosition, nullValue()); @@ -175,7 +179,7 @@ public void testMask() { public void testInsertNull() { int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 2, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 2, 10, 0, 0); try { assertInsertNulls(b.block()); } finally { @@ -185,7 +189,7 @@ public void testInsertNull() { private void assertFiltered(boolean all, boolean shuffled) { int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); try { int[] positions = randomFilterPositions(b.block(), all, shuffled); Block filtered = b.block().filter(positions); @@ -206,7 +210,7 @@ private void assertFiltered(boolean all, boolean shuffled) { assertThat(filtered.isNull(r), equalTo(true)); } else { assertThat(filtered.getValueCount(r), equalTo(b.values().get(positions[r]).size())); - assertThat(BasicBlockTests.valuesAtPositions(filtered, r, r + 1).get(0), equalTo(b.values().get(positions[r]))); + assertThat(valuesAtPositions(filtered, r, r + 
1).get(0), equalTo(b.values().get(positions[r]))); } } } finally { @@ -242,11 +246,11 @@ private void assertExpanded(Block orig) { assertThat(expanded.getValueCount(np++), equalTo(0)); continue; } - List oValues = BasicBlockTests.valuesAtPositions(orig, op, op + 1).get(0); + List oValues = valuesAtPositions(orig, op, op + 1).get(0); for (Object ov : oValues) { assertThat(expanded.isNull(np), equalTo(false)); assertThat(expanded.getValueCount(np), equalTo(1)); - assertThat(BasicBlockTests.valuesAtPositions(expanded, np, ++np).get(0), equalTo(List.of(ov))); + assertThat(valuesAtPositions(expanded, np, ++np).get(0), equalTo(List.of(ov))); } } } @@ -254,7 +258,7 @@ private void assertExpanded(Block orig) { private void assertFilteredThenExpanded(boolean all, boolean shuffled) { int positionCount = randomIntBetween(1, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 10, 0, 0); try { int[] positions = randomFilterPositions(b.block(), all, shuffled); assertExpanded(b.block().filter(positions)); @@ -296,7 +300,7 @@ public void allBreakersEmpty() throws Exception { private void assertLookup(ByteSizeValue targetBytes, int positionsToCopy, IntUnaryOperator positionsPerPosition) { BlockFactory positionsFactory = blockFactory(); int positionCount = randomIntBetween(100, 16 * 1024); - var b = BasicBlockTests.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 100, 0, 0); + var b = RandomBlock.randomBlock(blockFactory(), elementType, positionCount, nullAllowed, 0, 100, 0, 0); try (IntBlock.Builder builder = positionsFactory.newIntBlockBuilder(positionsToCopy);) { for (int p = 0; p < positionsToCopy; p++) { int max = positionsPerPosition.applyAsInt(p); @@ -338,7 +342,7 @@ private void assertLookup(ByteSizeValue targetBytes, int positionsToCopy, IntUna for (int i = start; i < end; i++) { int toCopy 
= positions.getInt(i); if (toCopy < b.block().getPositionCount()) { - List v = BasicBlockTests.valuesAtPositions(b.block(), toCopy, toCopy + 1).get(0); + List v = valuesAtPositions(b.block(), toCopy, toCopy + 1).get(0); if (v != null) { expected.addAll(v); } @@ -348,10 +352,7 @@ private void assertLookup(ByteSizeValue targetBytes, int positionsToCopy, IntUna assertThat(copy.isNull(p - positionOffset), equalTo(true)); } else { assertThat(copy.isNull(p - positionOffset), equalTo(false)); - assertThat( - BasicBlockTests.valuesAtPositions(copy, p - positionOffset, p + 1 - positionOffset).get(0), - equalTo(expected) - ); + assertThat(valuesAtPositions(copy, p - positionOffset, p + 1 - positionOffset).get(0), equalTo(expected)); } } assertThat(lookup.hasNext(), equalTo(false)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index 5a439becd4757..1e63f4f2be6fe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.SumLongAggregatorFunction; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -374,8 +376,7 @@ public void testCompositeBlock() throws Exception { final Block[] blocks = new Block[numBlocks]; for (int b = 0; b < numBlocks; b++) { ElementType elementType = randomFrom(ElementType.LONG, ElementType.DOUBLE, ElementType.BOOLEAN, 
ElementType.NULL); - blocks[b] = BasicBlockTests.randomBlock(blockFactory, elementType, positionCount, true, 0, between(1, 2), 0, between(1, 2)) - .block(); + blocks[b] = RandomBlock.randomBlock(blockFactory, elementType, positionCount, true, 0, between(1, 2), 0, between(1, 2)).block(); } try (CompositeBlock origBlock = new CompositeBlock(blocks)) { assertThat(origBlock.getBlockCount(), equalTo(numBlocks)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java index 3e2322200dcf0..7c0c4c48e97de 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; import java.util.BitSet; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index 6b3fa5df9b9ff..ddf8b1a28bc26 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -12,7 +12,8 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import 
java.util.Arrays; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java index 8df2e27827b48..ed7eded6eeda4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/CompositeBlockTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.RandomBlock; import java.util.Arrays; import java.util.List; @@ -24,7 +25,7 @@ public static CompositeBlock randomCompositeBlock(BlockFactory blockFactory, int Block[] blocks = new Block[numBlocks]; for (int b = 0; b < numBlocks; b++) { ElementType elementType = randomFrom(supportedSubElementTypes); - blocks[b] = BasicBlockTests.randomBlock( + blocks[b] = RandomBlock.randomBlock( blockFactory, elementType, positionCount, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index 2f9cf6ec57775..78192d6363d48 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -10,7 +10,8 @@ import org.elasticsearch.common.Randomness; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.BreakerTestUtil; diff 
--git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index c0a3b3b8ac751..0bdcd8a29add9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import java.util.BitSet; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java index 95e9349a18fee..046567ff5987d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FloatBlockEqualityTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import java.util.BitSet; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index 5beb091cbfaca..eb68bdf7a59d6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import java.util.BitSet; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index 3e425439bb800..6d6a832d27e54 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -7,7 +7,8 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import java.util.BitSet; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java index 31a7251a03307..f347efc533113 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.data; +import org.elasticsearch.compute.test.TestBlockBuilder; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.EqualsHashCodeTestUtils; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java index ffe13375bc941..87f3210dc12df 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SingletonOrdinalsBuilderTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.junit.After; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java deleted file mode 100644 index 5b7072ab6476d..0000000000000 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java +++ /dev/null @@ -1,26 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.compute.data; - -import org.elasticsearch.common.breaker.NoopCircuitBreaker; -import org.elasticsearch.common.util.BigArrays; - -public class TestBlockFactory { - - private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( - new NoopCircuitBreaker("test-noop"), - BigArrays.NON_RECYCLING_INSTANCE - ); - - /** - * Returns the Non-Breaking block factory. 
- */ - public static BlockFactory getNonBreakingInstance() { - return NON_BREAKING; - } -} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java index 3ab02ac5488bc..75495518f7523 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -64,7 +65,7 @@ public void testBuildSingle() { private void testBuild(int size) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); try (Vector.Builder builder = vectorBuilder(randomBoolean() ? 
size : 1, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, size, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); @@ -78,7 +79,7 @@ private void testBuild(int size) { public void testDoubleBuild() { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); @@ -96,7 +97,7 @@ public void testCranky() { for (int i = 0; i < 100; i++) { try { try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index 1086280af9df0..63f4962a99523 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import 
org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -67,7 +68,7 @@ public void testBuildSingle() { private void testBuild(int size) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); try (Vector.Builder builder = vectorBuilder(size, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, size, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, size, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); @@ -81,7 +82,7 @@ private void testBuild(int size) { public void testDoubleBuild() { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); try (Vector.Builder builder = vectorBuilder(10, blockFactory)) { - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); @@ -100,7 +101,7 @@ public void testCranky() { for (int i = 0; i < 100; i++) { try { Vector.Builder builder = vectorBuilder(10, blockFactory); - BasicBlockTests.RandomBlock random = BasicBlockTests.randomBlock(elementType, 10, false, 1, 1, 0, 0); + RandomBlock random = RandomBlock.randomBlock(elementType, 10, false, 1, 1, 0, 0); fill(builder, random.block().asVector()); try (Vector built = builder.build()) { assertThat(built, equalTo(random.block().asVector())); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java index 339c2bba2a734..78ed096c10b3f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/sort/BucketedSortTestCase.java @@ -16,7 +16,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.search.sort.SortOrder; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index 72a1a4aa342df..1f5b5bf9b9337 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -19,11 +19,11 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import 
org.elasticsearch.indices.CrankyCircuitBreakerService; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java index f5214dccbd00c..b65da5aba7588 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMaxOperatorTestCase.java @@ -20,11 +20,11 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.CrankyCircuitBreakerService; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java index 493512bd83bec..f57bbd8c5ddb5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneMinOperatorTestCase.java @@ -20,11 +20,11 @@ import org.elasticsearch.compute.aggregation.AggregatorFunction; import org.elasticsearch.compute.data.Block; 
import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.CrankyCircuitBreakerService; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java index ffaee536b443e..6c978297b8497 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneQueryExpressionEvaluatorTests.java @@ -32,14 +32,14 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.LuceneQueryExpressionEvaluator.DenseCollector; -import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.ShuffleDocsOperator; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import 
org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.index.mapper.BlockDocValuesReader; @@ -51,7 +51,7 @@ import java.util.Set; import java.util.TreeSet; -import static org.elasticsearch.compute.operator.OperatorTestCase.randomPageSize; +import static org.elasticsearch.compute.test.OperatorTestCase.randomPageSize; import static org.hamcrest.Matchers.equalTo; public class LuceneQueryExpressionEvaluatorTests extends ComputeTestCase { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 2dcc5e20d3f98..b7114bb4e9b54 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -22,12 +22,12 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.mapper.MappedFieldType; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java index a0fa1c2c01c0a..20af40bcc6840 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorScoringTests.java @@ -25,8 +25,8 @@ import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index d9a0b70b7931e..a6d652d499d84 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -25,12 +25,12 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; -import 
org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java index 4863eea5d5ca3..feba401d445e7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/TimeSeriesSortedSourceOperatorTests.java @@ -33,12 +33,12 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.mapper.DataStreamTimestampFieldMapper; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java index f31573f121a71..910541607d83f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValueSourceReaderTypeConversionTests.java @@ -48,18 +48,18 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.compute.operator.AnyOperatorTestCase; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverRunner; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.PageConsumerOperator; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; -import org.elasticsearch.compute.operator.TestResultPageSinkOperator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java index 
95b313b0b5412..2661ff665831f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorTests.java @@ -45,13 +45,13 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.PageConsumerOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.core.IOUtils; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.IndexVersion; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java index 38d83fe894170..5e16fce2af00b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AggregationOperatorTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.hamcrest.Matcher; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java index 38f25244cd917..f017fed16cc96 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AsyncOperatorTests.java @@ -26,8 +26,10 @@ import org.elasticsearch.compute.data.LocalCircuitBreaker; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; +import org.elasticsearch.compute.test.MockBlockFactory; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/BytesRefBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/BytesRefBlockSourceOperator.java index 77e6b4eb4d794..d95d1728ddfb7 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/BytesRefBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/BytesRefBlockSourceOperator.java @@ -11,6 +11,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java index 9c9e7a7933682..bd38967be7e34 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnExtractOperatorTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; import org.hamcrest.Matcher; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnLoadOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnLoadOperatorTests.java index 47d6668cb7659..3bcecda2ff9e5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnLoadOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ColumnLoadOperatorTests.java @@ -11,7 +11,8 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.hamcrest.Matcher; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java index a3af5aafcbee3..563e88ab4eeb1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import 
org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java index b067c44a289b4..cc983e6b83fbe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.common.util.set.Sets; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; @@ -27,6 +26,9 @@ import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.FixedExecutorBuilder; @@ -358,7 +360,7 @@ private static void assertRunningWithRegularUser(ThreadPool threadPool) { } private static Page randomPage() { - BasicBlockTests.RandomBlock block = BasicBlockTests.randomBlock( + RandomBlock block = RandomBlock.randomBlock( randomFrom(ElementType.BOOLEAN, 
ElementType.INT, ElementType.BYTES_REF), between(1, 10), randomBoolean(), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index 6f18fa59937f6..85f4804653157 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -13,6 +13,8 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.EvalOperator.EvalOperatorFactory; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.core.Tuple; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index 4bf9f3942c0a8..ce85d9baa5c7d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -14,6 +14,8 @@ import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.core.Tuple; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 542bf5bc384a5..94a5299dd8216 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -14,13 +14,16 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.threadpool.FixedExecutorBuilder; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorAppendPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorAppendPageTests.java index b6082bb52e72b..e9d9ce7c70fdf 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorAppendPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorAppendPageTests.java @@ -10,7 +10,9 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; -import 
org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.ReleasableIterator; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorRemovePageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorRemovePageTests.java index 8059548a0ef0f..c682a2960b380 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorRemovePageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/IteratorRemovePageTests.java @@ -9,6 +9,8 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.elasticsearch.core.ReleasableIterator; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java index bbe4a07cc44bd..b05be86a164aa 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LimitOperatorTests.java @@ -7,16 +7,18 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.RandomBlock; +import 
org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; import org.hamcrest.Matcher; import java.util.List; import java.util.stream.LongStream; -import static org.elasticsearch.compute.data.BasicBlockTests.randomElementType; +import static org.elasticsearch.compute.test.RandomBlock.randomElementType; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.sameInstance; @@ -128,6 +130,6 @@ Block randomBlock(BlockFactory blockFactory, int size) { if (randomBoolean()) { return blockFactory.newConstantNullBlock(size); } - return BasicBlockTests.randomBlock(blockFactory, randomElementType(), size, false, 1, 1, 0, 0).block(); + return RandomBlock.randomBlock(blockFactory, randomElementType(), size, false, 1, 1, 0, 0).block(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBooleanTupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBooleanTupleBlockSourceOperator.java index 575996e90eb50..0c8ea4e2aff1e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBooleanTupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBooleanTupleBlockSourceOperator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBytesRefTupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBytesRefTupleBlockSourceOperator.java index 06f9c649d7c63..767335a7beae5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBytesRefTupleBlockSourceOperator.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongBytesRefTupleBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongDoubleTupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongDoubleTupleBlockSourceOperator.java index fc27286ab74ac..f5037a08c2759 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongDoubleTupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongDoubleTupleBlockSourceOperator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongFloatTupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongFloatTupleBlockSourceOperator.java index 9276174c9dbb1..598dad9045be2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongFloatTupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongFloatTupleBlockSourceOperator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongIntBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongIntBlockSourceOperator.java index 224fa27779aea..a3e03b5d4e018 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongIntBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/LongIntBlockSourceOperator.java @@ -9,6 +9,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index b07ff8b0da571..d6abdac383e5c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -11,15 +11,18 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestBlockFactory; import org.hamcrest.Matcher; import java.util.Iterator; import java.util.List; -import static org.elasticsearch.compute.data.BasicBlockTests.randomBlock; -import static org.elasticsearch.compute.data.BasicBlockTests.valuesAtPositions; -import static org.elasticsearch.compute.data.BlockTestUtils.deepCopyOf; +import 
static org.elasticsearch.compute.test.BlockTestUtils.deepCopyOf; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; +import static org.elasticsearch.compute.test.RandomBlock.randomBlock; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OutputOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OutputOperatorTests.java index d794726da58ba..8a6e890afde2b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OutputOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OutputOperatorTests.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.compute.test.AnyOperatorTestCase; import org.hamcrest.Matcher; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java index 8cbdf7dfc7b4c..12b22a6244ae3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ProjectOperatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; import org.elasticsearch.core.Tuple; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowInTableLookupOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowInTableLookupOperatorTests.java index 747309e3712e7..7522f57a3a9ee 100644 --- 
a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowInTableLookupOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowInTableLookupOperatorTests.java @@ -8,11 +8,14 @@ package org.elasticsearch.compute.operator; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Tuple; import org.hamcrest.Matcher; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index 9665590940afe..97fb380b6aac1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBytesRefBlockSourceOperator.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBytesRefBlockSourceOperator.java index 75e71ff697efb..c235e576736ef 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBytesRefBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBytesRefBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; import java.util.stream.Stream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java index 0a5f03abd787a..f11d1f91cc0a4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceDoubleBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DoubleVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; import java.util.stream.DoubleStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceFloatBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceFloatBlockSourceOperator.java index db524366b381e..d73187baeddb3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceFloatBlockSourceOperator.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceFloatBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.FloatVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; import java.util.stream.IntStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceIntBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceIntBlockSourceOperator.java index 2795b6044a2c5..56b18fcea7506 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceIntBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceIntBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import java.util.List; import java.util.stream.IntStream; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 40ef554bc2009..09105774388a2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.OperatorTestCase; import org.hamcrest.Matcher; import java.util.List; diff 
--git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java index da1a9c9408f90..afd4695db932f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TimeSeriesAggregationOperatorTests.java @@ -24,6 +24,9 @@ import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperatorTests; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.mapper.KeywordFieldMapper; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java index e41c82b89772e..b905de17608cb 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TupleBlockSourceOperator.java @@ -10,6 +10,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.AbstractBlockSourceOperator; import org.elasticsearch.core.Tuple; import java.util.List; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java index a006a984eb178..bd5b53fb25c8b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeBufferTests.java @@ -12,12 +12,12 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.MockBlockFactory; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.test.ESTestCase; import java.util.concurrent.CountDownLatch; @@ -73,7 +73,7 @@ private static MockBlockFactory blockFactory() { } private static Page randomPage(BlockFactory blockFactory) { - Block block = BasicBlockTests.randomBlock( + Block block = RandomBlock.randomBlock( blockFactory, randomFrom(ElementType.LONG, ElementType.BYTES_REF, ElementType.BOOLEAN), randomIntBetween(1, 100), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeResponseTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeResponseTests.java index 982e1eb77e2f5..8a413b6379290 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeResponseTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeResponseTests.java @@ -8,12 +8,12 @@ package org.elasticsearch.compute.operator.exchange; import 
org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.RandomBlock; import static org.hamcrest.Matchers.equalTo; @@ -25,7 +25,7 @@ public void testReverseBytesWhenSerializing() throws Exception { Block[] blocks = new Block[numBlocks]; int positions = randomIntBetween(1, 10); for (int b = 0; b < numBlocks; b++) { - var block = BasicBlockTests.randomBlock( + var block = RandomBlock.randomBlock( factory, randomFrom(ElementType.BOOLEAN, ElementType.LONG, ElementType.BYTES_REF), positions, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 8f7532b582bc2..363ad9c49ddfe 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -26,13 +26,13 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.DriverRunner; import org.elasticsearch.compute.operator.SinkOperator; import org.elasticsearch.compute.operator.SourceOperator; +import 
org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.core.ReleasableRef; import org.elasticsearch.core.TimeValue; import org.elasticsearch.tasks.Task; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java index 1312b772dbfa1..c62b95e3e816d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/lookup/RightChunkedLeftJoinTests.java @@ -8,15 +8,15 @@ package org.elasticsearch.compute.operator.lookup; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BytesRefVector; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.ComputeTestCase; +import org.elasticsearch.compute.test.RandomBlock; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ListMatcher; @@ -226,8 +226,8 @@ public void testRandomCranky() { private void testRandom(BlockFactory factory) { int leftSize = between(100, 10000); - ElementType[] leftColumns = randomArray(1, 10, ElementType[]::new, BasicBlockTests::randomElementType); - ElementType[] rightColumns = randomArray(1, 10, ElementType[]::new, BasicBlockTests::randomElementType); + ElementType[] leftColumns = randomArray(1, 10, ElementType[]::new, RandomBlock::randomElementType); + ElementType[] 
rightColumns = randomArray(1, 10, ElementType[]::new, RandomBlock::randomElementType); RandomPage left = randomPage(factory, leftColumns, leftSize); try (RightChunkedLeftJoin join = new RightChunkedLeftJoin(left.page, rightColumns.length)) { @@ -389,10 +389,10 @@ Object unwrapSingletonLists(Object o) { return o; } - record RandomPage(Page page, BasicBlockTests.RandomBlock[] blocks) {}; + record RandomPage(Page page, RandomBlock[] blocks) {}; RandomPage randomPage(BlockFactory factory, ElementType[] types, int positions, Block... prepend) { - BasicBlockTests.RandomBlock[] randomBlocks = new BasicBlockTests.RandomBlock[types.length]; + RandomBlock[] randomBlocks = new RandomBlock[types.length]; Block[] blocks = new Block[prepend.length + types.length]; try { for (int c = 0; c < prepend.length; c++) { @@ -401,16 +401,7 @@ RandomPage randomPage(BlockFactory factory, ElementType[] types, int positions, for (int c = 0; c < types.length; c++) { int min = between(0, 3); - randomBlocks[c] = BasicBlockTests.randomBlock( - factory, - types[c], - positions, - randomBoolean(), - min, - between(min, min + 3), - 0, - 0 - ); + randomBlocks[c] = RandomBlock.randomBlock(factory, types[c], positions, randomBoolean(), min, between(min, min + 3), 0, 0); blocks[prepend.length + c] = randomBlocks[c].block(); } Page p = new Page(blocks); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java index e535c0dddd7c2..958c2a05e659d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/mvdedupe/MultivalueDedupeTests.java @@ -18,17 +18,17 @@ import org.elasticsearch.common.util.LongHash; import org.elasticsearch.common.util.MockBigArrays; import 
org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BasicBlockTests; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -46,6 +46,7 @@ import java.util.function.LongFunction; import java.util.stream.Collectors; +import static org.elasticsearch.compute.test.BlockTestUtils.valuesAtPositions; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; @@ -117,24 +118,24 @@ public MultivalueDedupeTests( public void testDedupeAdaptive() { BlockFactory blockFactory = blockFactory(); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); assertDeduped(blockFactory, b, MultivalueDedupe.dedupeToBlockAdaptive(b.block(), blockFactory)); } public void testDedupeViaCopyAndSort() { BlockFactory blockFactory = blockFactory(); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); assertDeduped(blockFactory, b, MultivalueDedupe.dedupeToBlockUsingCopyAndSort(b.block(), blockFactory)); } public void testDedupeViaCopyMissing() { BlockFactory blockFactory = blockFactory(); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); 
assertDeduped(blockFactory, b, MultivalueDedupe.dedupeToBlockUsingCopyMissing(b.block(), blockFactory)); } - private BasicBlockTests.RandomBlock randomBlock() { - return BasicBlockTests.randomBlock( + private RandomBlock randomBlock() { + return RandomBlock.randomBlock( elementType, positionCount, elementType == ElementType.NULL ? true : nullAllowed, @@ -145,7 +146,7 @@ private BasicBlockTests.RandomBlock randomBlock() { ); } - private void assertDeduped(BlockFactory blockFactory, BasicBlockTests.RandomBlock b, Block dedupedBlock) { + private void assertDeduped(BlockFactory blockFactory, RandomBlock b, Block dedupedBlock) { try { if (dedupedBlock != b.block()) { assertThat(dedupedBlock.blockFactory(), sameInstance(blockFactory)); @@ -164,7 +165,7 @@ private void assertDeduped(BlockFactory blockFactory, BasicBlockTests.RandomBloc public void testHash() { assumeFalse("not hash for null", elementType == ElementType.NULL); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); switch (b.block().elementType()) { case BOOLEAN -> assertBooleanHash(Set.of(), b); case BYTES_REF -> assertBytesRefHash(Set.of(), b); @@ -177,7 +178,7 @@ public void testHash() { public void testHashWithPreviousValues() { assumeFalse("not hash for null", elementType == ElementType.NULL); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); switch (b.block().elementType()) { case BOOLEAN -> { Set previousValues = switch (between(0, 2)) { @@ -228,7 +229,7 @@ public void testHashWithPreviousValues() { public void testBatchEncodeAll() { assumeFalse("null only direct encodes", elementType == ElementType.NULL); int initCapacity = Math.toIntExact(ByteSizeValue.ofKb(10).getBytes()); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(b.block(), initCapacity, false); int valueOffset = 0; @@ -246,7 +247,7 @@ public void testBatchEncodeAll() { public void 
testBatchEncoderStartSmall() { assumeFalse("Booleans don't grow in the same way", elementType == ElementType.BOOLEAN); assumeFalse("Nulls don't grow", elementType == ElementType.NULL); - BasicBlockTests.RandomBlock b = randomBlock(); + RandomBlock b = randomBlock(); var encoder = (BatchEncoder.MVEncoder) MultivalueDedupe.batchEncoder(b.block(), 0, false); /* @@ -274,7 +275,7 @@ public void testBatchEncoderStartSmall() { assertEncodedPosition(b, encoder, leadingNulls, 0, 0); } - private void assertBooleanHash(Set previousValues, BasicBlockTests.RandomBlock b) { + private void assertBooleanHash(Set previousValues, RandomBlock b) { boolean[] everSeen = new boolean[3]; if (previousValues.contains(false)) { everSeen[1] = true; @@ -294,7 +295,7 @@ private void assertBooleanHash(Set previousValues, BasicBlockTests.Rand } } - private void assertBytesRefHash(Set previousValues, BasicBlockTests.RandomBlock b) { + private void assertBytesRefHash(Set previousValues, RandomBlock b) { BytesRefHash hash = new BytesRefHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.forEach(hash::add); MultivalueDedupe.HashResult hashes = new MultivalueDedupeBytesRef((BytesRefBlock) b.block()).hashAdd(blockFactory(), hash); @@ -306,7 +307,7 @@ private void assertBytesRefHash(Set previousValues, BasicBlockTests.Ra assertThat(hash.size(), equalTo(sizeBeforeLookup)); assertLookup(previousValues, b, b, lookup, i -> hash.get(i, new BytesRef())); } - BasicBlockTests.RandomBlock other = randomBlock(); + RandomBlock other = randomBlock(); if (randomBoolean()) { other = b.merge(other); } @@ -318,7 +319,7 @@ private void assertBytesRefHash(Set previousValues, BasicBlockTests.Ra } } - private void assertIntHash(Set previousValues, BasicBlockTests.RandomBlock b) { + private void assertIntHash(Set previousValues, RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.forEach(hash::add); MultivalueDedupe.HashResult hashes = new MultivalueDedupeInt((IntBlock) 
b.block()).hashAdd(blockFactory(), hash); @@ -330,7 +331,7 @@ private void assertIntHash(Set previousValues, BasicBlockTests.RandomBl assertThat(hash.size(), equalTo(sizeBeforeLookup)); assertLookup(previousValues, b, b, lookup, i -> (int) hash.get(i)); } - BasicBlockTests.RandomBlock other = randomBlock(); + RandomBlock other = randomBlock(); if (randomBoolean()) { other = b.merge(other); } @@ -342,7 +343,7 @@ private void assertIntHash(Set previousValues, BasicBlockTests.RandomBl } } - private void assertLongHash(Set previousValues, BasicBlockTests.RandomBlock b) { + private void assertLongHash(Set previousValues, RandomBlock b) { try (LongHash hash = new LongHash(1, blockFactory().bigArrays())) { previousValues.forEach(hash::add); MultivalueDedupe.HashResult hashes = new MultivalueDedupeLong((LongBlock) b.block()).hashAdd(blockFactory(), hash); @@ -354,7 +355,7 @@ private void assertLongHash(Set previousValues, BasicBlockTests.RandomBloc assertThat(hash.size(), equalTo(sizeBeforeLookup)); assertLookup(previousValues, b, b, lookup, hash::get); } - BasicBlockTests.RandomBlock other = randomBlock(); + RandomBlock other = randomBlock(); if (randomBoolean()) { other = b.merge(other); } @@ -367,7 +368,7 @@ private void assertLongHash(Set previousValues, BasicBlockTests.RandomBloc } } - private void assertDoubleHash(Set previousValues, BasicBlockTests.RandomBlock b) { + private void assertDoubleHash(Set previousValues, RandomBlock b) { LongHash hash = new LongHash(1, BigArrays.NON_RECYCLING_INSTANCE); previousValues.forEach(d -> hash.add(Double.doubleToLongBits(d))); MultivalueDedupe.HashResult hashes = new MultivalueDedupeDouble((DoubleBlock) b.block()).hashAdd(blockFactory(), hash); @@ -379,7 +380,7 @@ private void assertDoubleHash(Set previousValues, BasicBlockTests.Random assertThat(hash.size(), equalTo(sizeBeforeLookup)); assertLookup(previousValues, b, b, lookup, i -> Double.longBitsToDouble(hash.get(i))); } - BasicBlockTests.RandomBlock other = randomBlock(); + 
RandomBlock other = randomBlock(); if (randomBoolean()) { other = b.merge(other); } @@ -391,12 +392,12 @@ private void assertDoubleHash(Set previousValues, BasicBlockTests.Random } } - private Boolean shouldHaveSeenNull(BasicBlockTests.RandomBlock b) { + private Boolean shouldHaveSeenNull(RandomBlock b) { return b.values().stream().anyMatch(Objects::isNull); } private void assertHash( - BasicBlockTests.RandomBlock b, + RandomBlock b, IntBlock hashes, long hashSize, Set previousValues, @@ -432,8 +433,8 @@ private void assertHash( private void assertLookup( Set previousValues, - BasicBlockTests.RandomBlock hashed, - BasicBlockTests.RandomBlock lookedUp, + RandomBlock hashed, + RandomBlock lookedUp, IntBlock lookup, LongFunction valueLookup ) { @@ -463,7 +464,7 @@ private void assertLookup( } } - private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder encoder, int position, int offset, int valueOffset) { + private int assertEncodedPosition(RandomBlock b, BatchEncoder encoder, int position, int offset, int valueOffset) { List expected = b.values().get(position); if (expected == null) { expected = new ArrayList<>(); @@ -509,9 +510,7 @@ private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder en Block decoded = builder.build(); assertThat(decoded.getPositionCount(), equalTo(toDecode.length)); List actual = new ArrayList<>(); - BasicBlockTests.valuesAtPositions(decoded, 0, decoded.getPositionCount()) - .stream() - .forEach(l -> actual.add(l == null ? null : l.get(0))); + valuesAtPositions(decoded, 0, decoded.getPositionCount()).stream().forEach(l -> actual.add(l == null ? 
null : l.get(0))); Collections.sort(actual, Comparator.comparing(o -> { @SuppressWarnings("unchecked") // This is totally comparable, believe me var c = (Comparable) o; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index f01e3c18c78bc..836b8795a6f79 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -15,12 +15,12 @@ import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; +import org.elasticsearch.compute.test.BlockTestUtils; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index be598f100563d..e63e8b63d6ee9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -21,18 +21,18 @@ import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import 
org.elasticsearch.compute.data.TestBlockBuilder; -import org.elasticsearch.compute.data.TestBlockFactory; -import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.CountingCircuitBreaker; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.Operator; -import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.PageConsumerOperator; -import org.elasticsearch.compute.operator.SequenceLongBlockSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.TupleBlockSourceOperator; +import org.elasticsearch.compute.test.CannedSourceOperator; +import org.elasticsearch.compute.test.OperatorTestCase; +import org.elasticsearch.compute.test.SequenceLongBlockSourceOperator; +import org.elasticsearch.compute.test.TestBlockBuilder; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Tuple; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -59,9 +59,6 @@ import static java.util.Comparator.naturalOrder; import static java.util.Comparator.reverseOrder; -import static org.elasticsearch.compute.data.BlockTestUtils.append; -import static org.elasticsearch.compute.data.BlockTestUtils.randomValue; -import static org.elasticsearch.compute.data.BlockTestUtils.readInto; import static org.elasticsearch.compute.data.BlockUtils.toJavaObject; import static org.elasticsearch.compute.data.ElementType.BOOLEAN; import static org.elasticsearch.compute.data.ElementType.BYTES_REF; @@ -74,6 +71,9 @@ import static org.elasticsearch.compute.operator.topn.TopNEncoder.DEFAULT_UNSORTABLE; import static org.elasticsearch.compute.operator.topn.TopNEncoder.UTF8; import static org.elasticsearch.compute.operator.topn.TopNEncoderTests.randomPointAsWKB; +import static 
org.elasticsearch.compute.test.BlockTestUtils.append; +import static org.elasticsearch.compute.test.BlockTestUtils.randomValue; +import static org.elasticsearch.compute.test.BlockTestUtils.readInto; import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.test.ListMatcher.matchesList; import static org.elasticsearch.test.MapMatcher.assertMap; diff --git a/x-pack/plugin/esql/compute/test/build.gradle b/x-pack/plugin/esql/compute/test/build.gradle new file mode 100644 index 0000000000000..82c4701e84fea --- /dev/null +++ b/x-pack/plugin/esql/compute/test/build.gradle @@ -0,0 +1,11 @@ +apply plugin: 'elasticsearch.build' + +dependencies { + api project(':test:framework') + api project(xpackModule('esql:compute')) + // Use esql-core to see SpatialCoordinateTypes. That's in core because + // Literal is in core. There's a bit of a chain here. We'd prefer not + // do this, but such is life. For now. + api project(xpackModule('esql-core')) +} + diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractBlockSourceOperator.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AbstractBlockSourceOperator.java similarity index 95% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractBlockSourceOperator.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AbstractBlockSourceOperator.java index 7158397b7794e..da0e1d20500e1 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AbstractBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AbstractBlockSourceOperator.java @@ -5,10 +5,11 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.test.ESTestCase; /** diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AnyOperatorTestCase.java similarity index 95% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AnyOperatorTestCase.java index 3d4c8b8ed226e..226fb67fdc188 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/AnyOperatorTestCase.java @@ -5,13 +5,17 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.SinkOperator; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/BlockTestUtils.java similarity index 81% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/BlockTestUtils.java index 55e80a9124de0..c4579978b207e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockTestUtils.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/BlockTestUtils.java @@ -5,9 +5,21 @@ * 2.0. 
*/ -package org.elasticsearch.compute.data; +package org.elasticsearch.compute.test; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DocBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.Page; import org.hamcrest.Matcher; import java.util.ArrayList; @@ -210,7 +222,7 @@ public static void readInto(List values, Block block) { */ @SuppressWarnings("unchecked") public static void assertPositionValues(Block b, int p, Matcher valuesMatcher) { - List value = BasicBlockTests.valuesAtPositions(b, p, p + 1).get(0); + List value = valuesAtPositions(b, p, p + 1).get(0); assertThat((T) value, valuesMatcher); if (value == null) { assertThat(b.getValueCount(p), equalTo(0)); @@ -229,4 +241,30 @@ public static Page deepCopyOf(Page page, BlockFactory blockFactory) { public static List deepCopyOf(List pages, BlockFactory blockFactory) { return pages.stream().map(page -> deepCopyOf(page, blockFactory)).toList(); } + + public static List> valuesAtPositions(Block block, int from, int to) { + List> result = new ArrayList<>(to - from); + for (int p = from; p < to; p++) { + if (block.isNull(p)) { + result.add(null); + continue; + } + int count = block.getValueCount(p); + List positionValues = new ArrayList<>(count); + int i = block.getFirstValueIndex(p); + for (int v = 0; v < count; v++) { + positionValues.add(switch (block.elementType()) { + case INT -> ((IntBlock) block).getInt(i++); + case LONG -> ((LongBlock) block).getLong(i++); + case FLOAT -> ((FloatBlock) 
block).getFloat(i++); + case DOUBLE -> ((DoubleBlock) block).getDouble(i++); + case BYTES_REF -> ((BytesRefBlock) block).getBytesRef(i++, new BytesRef()); + case BOOLEAN -> ((BooleanBlock) block).getBoolean(i++); + default -> throw new IllegalArgumentException("unsupported element type [" + block.elementType() + "]"); + }); + } + result.add(positionValues); + } + return result; + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/CannedSourceOperator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/CannedSourceOperator.java index 4d5a6260ed02d..34ce21dad1030 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/CannedSourceOperator.java @@ -5,13 +5,13 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/ComputeTestCase.java similarity index 97% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/ComputeTestCase.java index cf99c59bb4c71..fd50c3dc8ae17 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/ComputeTestCase.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; @@ -14,7 +14,6 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/MockBlockFactory.java similarity index 91% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/MockBlockFactory.java index dc2aef4bca0eb..3849bb01ce3bd 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/MockBlockFactory.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.compute.data; +package org.elasticsearch.compute.test; import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; @@ -13,7 +13,27 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.compute.data.AbstractBlockBuilder; +import org.elasticsearch.compute.data.AbstractVectorBuilder; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.Block.MvOrdering; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BooleanVectorFixedBuilder; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.DoubleVectorFixedBuilder; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.IntVectorFixedBuilder; +import org.elasticsearch.compute.data.LocalCircuitBreaker; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.elasticsearch.compute.data.LongVectorFixedBuilder; +import org.elasticsearch.compute.data.Vector; import java.util.BitSet; import java.util.HashMap; @@ -86,7 +106,7 @@ public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { } @Override - void adjustBreaker(final long delta) { + public void adjustBreaker(final long delta) { purgeTrackBlocks(); super.adjustBreaker(delta); } @@ -99,7 +119,7 @@ void purgeTrackBlocks() { TRACKED_BLOCKS.remove(block); } } else if (b instanceof AbstractBlockBuilder blockBuilder) { - if (blockBuilder.closed) { + if 
(blockBuilder.isReleased()) { TRACKED_BLOCKS.remove(blockBuilder); } } else if (b instanceof IntVectorFixedBuilder vecBuilder) { @@ -119,7 +139,7 @@ void purgeTrackBlocks() { TRACKED_BLOCKS.remove(vecBuilder); } } else if (b instanceof AbstractVectorBuilder vecBuilder) { - if (vecBuilder.closed) { + if (vecBuilder.isReleased()) { TRACKED_BLOCKS.remove(vecBuilder); } } else if (b instanceof Vector vector) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java similarity index 97% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java index 28b7095aa1bde..9b745c5ff0354 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/OperatorTestCase.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -22,10 +22,14 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BlockTestUtils; -import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.operator.Driver; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.DriverRunner; +import org.elasticsearch.compute.operator.Operator; +import org.elasticsearch.compute.operator.PageConsumerOperator; +import org.elasticsearch.compute.operator.SinkOperator; +import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; diff --git a/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/RandomBlock.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/RandomBlock.java new file mode 100644 index 0000000000000..dabf8d437fb5f --- /dev/null +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/RandomBlock.java @@ -0,0 +1,201 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.test; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.geo.GeometryTestUtils; +import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; + +import java.util.ArrayList; +import java.util.List; +import java.util.function.Supplier; + +/** + * A block of random values. + * @param values the values as java object + * @param block randomly built block + */ +public record RandomBlock(List> values, Block block) { + /** + * A random {@link ElementType} for which we can build a {@link RandomBlock}. + */ + public static ElementType randomElementType() { + return ESTestCase.randomValueOtherThanMany( + e -> e == ElementType.UNKNOWN || e == ElementType.NULL || e == ElementType.DOC || e == ElementType.COMPOSITE, + () -> ESTestCase.randomFrom(ElementType.values()) + ); + } + + // TODO Some kind of builder for this with nice defaults. We do this all the time and there's like a zillion parameters. 
+ + public static RandomBlock randomBlock( + ElementType elementType, + int positionCount, + boolean nullAllowed, + int minValuesPerPosition, + int maxValuesPerPosition, + int minDupsPerPosition, + int maxDupsPerPosition + ) { + return randomBlock( + TestBlockFactory.getNonBreakingInstance(), + elementType, + positionCount, + nullAllowed, + minValuesPerPosition, + maxValuesPerPosition, + minDupsPerPosition, + maxDupsPerPosition + ); + } + + public static RandomBlock randomBlock( + BlockFactory blockFactory, + ElementType elementType, + int positionCount, + boolean nullAllowed, + int minValuesPerPosition, + int maxValuesPerPosition, + int minDupsPerPosition, + int maxDupsPerPosition + ) { + List> values = new ArrayList<>(); + Block.MvOrdering mvOrdering = Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING; + try (var builder = elementType.newBlockBuilder(positionCount, blockFactory)) { + boolean bytesRefFromPoints = ESTestCase.randomBoolean(); + Supplier pointSupplier = ESTestCase.randomBoolean() ? 
GeometryTestUtils::randomPoint : ShapeTestUtils::randomPoint; + for (int p = 0; p < positionCount; p++) { + if (elementType == ElementType.NULL) { + assert nullAllowed; + values.add(null); + builder.appendNull(); + continue; + } + int valueCount = ESTestCase.between(minValuesPerPosition, maxValuesPerPosition); + if (valueCount == 0 || nullAllowed && ESTestCase.randomBoolean()) { + values.add(null); + builder.appendNull(); + continue; + } + int dupCount = ESTestCase.between(minDupsPerPosition, maxDupsPerPosition); + if (valueCount != 1 || dupCount != 0) { + builder.beginPositionEntry(); + } + List valuesAtPosition = new ArrayList<>(); + values.add(valuesAtPosition); + for (int v = 0; v < valueCount; v++) { + switch (elementType) { + case INT -> { + int i = ESTestCase.randomInt(); + valuesAtPosition.add(i); + ((IntBlock.Builder) builder).appendInt(i); + } + case LONG -> { + long l = ESTestCase.randomLong(); + valuesAtPosition.add(l); + ((LongBlock.Builder) builder).appendLong(l); + } + case FLOAT -> { + float f = ESTestCase.randomFloat(); + valuesAtPosition.add(f); + ((FloatBlock.Builder) builder).appendFloat(f); + } + case DOUBLE -> { + double d = ESTestCase.randomDouble(); + valuesAtPosition.add(d); + ((DoubleBlock.Builder) builder).appendDouble(d); + } + case BYTES_REF -> { + BytesRef b = bytesRefFromPoints + ? 
SpatialCoordinateTypes.GEO.asWkb(pointSupplier.get()) + : new BytesRef(ESTestCase.randomRealisticUnicodeOfLength(4)); + valuesAtPosition.add(b); + ((BytesRefBlock.Builder) builder).appendBytesRef(b); + } + case BOOLEAN -> { + boolean b = ESTestCase.randomBoolean(); + valuesAtPosition.add(b); + ((BooleanBlock.Builder) builder).appendBoolean(b); + } + default -> throw new IllegalArgumentException("unsupported element type [" + elementType + "]"); + } + } + for (int i = 0; i < dupCount; i++) { + BlockTestUtils.append(builder, ESTestCase.randomFrom(valuesAtPosition)); + } + if (valueCount != 1 || dupCount != 0) { + builder.endPositionEntry(); + } + if (dupCount > 0) { + mvOrdering = Block.MvOrdering.UNORDERED; + } else if (mvOrdering != Block.MvOrdering.UNORDERED) { + List dedupedAndSortedList = valuesAtPosition.stream().sorted().distinct().toList(); + if (dedupedAndSortedList.size() != valuesAtPosition.size()) { + mvOrdering = Block.MvOrdering.UNORDERED; + } else if (dedupedAndSortedList.equals(valuesAtPosition) == false) { + mvOrdering = Block.MvOrdering.DEDUPLICATED_UNORDERD; + } + } + } + if (ESTestCase.randomBoolean()) { + builder.mvOrdering(mvOrdering); + } + return new RandomBlock(values, builder.build()); + } + } + + public int valueCount() { + return values.stream().mapToInt(l -> l == null ? 0 : l.size()).sum(); + } + + /** + * Build a {@link RandomBlock} contain the values of two blocks, preserving the relative order. 
+ */ + public RandomBlock merge(RandomBlock rhs) { + int estimatedSize = values().size() + rhs.values().size(); + int l = 0; + int r = 0; + List> mergedValues = new ArrayList<>(estimatedSize); + try (Block.Builder mergedBlock = block.elementType().newBlockBuilder(estimatedSize, block.blockFactory())) { + while (l < values.size() && r < rhs.values.size()) { + if (ESTestCase.randomBoolean()) { + mergedValues.add(values.get(l)); + mergedBlock.copyFrom(block, l, l + 1); + l++; + } else { + mergedValues.add(rhs.values.get(r)); + mergedBlock.copyFrom(rhs.block, r, r + 1); + r++; + } + } + while (l < values.size()) { + mergedValues.add(values.get(l)); + mergedBlock.copyFrom(block, l, l + 1); + l++; + } + while (r < rhs.values.size()) { + mergedValues.add(rhs.values.get(r)); + mergedBlock.copyFrom(rhs.block, r, r + 1); + r++; + } + return new RandomBlock(mergedValues, mergedBlock.build()); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/SequenceLongBlockSourceOperator.java similarity index 97% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/SequenceLongBlockSourceOperator.java index bb32e148440b4..5c44a9d6123b6 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceLongBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/SequenceLongBlockSourceOperator.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockBuilder.java similarity index 96% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockBuilder.java index 6472e1fbba4f7..b51de1e477ee0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockBuilder.java @@ -5,9 +5,18 @@ * 2.0. */ -package org.elasticsearch.compute.data; +package org.elasticsearch.compute.test; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.ElementType; +import org.elasticsearch.compute.data.FloatBlock; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.LongBlock; import java.util.List; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockFactory.java similarity index 88% rename from x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockFactory.java index 99cf8be307054..42e199d6d192a 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestBlockFactory.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql; +package org.elasticsearch.compute.test; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; @@ -14,7 +14,7 @@ public class TestBlockFactory { private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( - new NoopCircuitBreaker("noop-esql-breaker"), + new NoopCircuitBreaker("test-noop"), BigArrays.NON_RECYCLING_INSTANCE ); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestResultPageSinkOperator.java similarity index 84% rename from x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java rename to x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestResultPageSinkOperator.java index e2cb0e21938e2..ff330bea5dba0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java +++ b/x-pack/plugin/esql/compute/test/src/main/java/org/elasticsearch/compute/test/TestResultPageSinkOperator.java @@ -5,11 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.compute.operator; +package org.elasticsearch.compute.test; -import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.data.TestBlockFactory; +import org.elasticsearch.compute.operator.PageConsumerOperator; import java.util.function.Consumer; diff --git a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java index 98e5799c8d3f2..d8e3b0cccf394 100644 --- a/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java +++ b/x-pack/plugin/esql/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/esql/EsqlSecurityIT.java @@ -597,6 +597,16 @@ public void testLookupJoinIndexForbidden() throws Exception { assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + resp = expectThrows( + ResponseException.class, + () -> runESQLCommand( + "metadata1_read2", + "FROM lookup-user2 | EVAL value = 10.0 | LOOKUP JOIN `lookup-first-alias` ON value | KEEP x" + ) + ); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-first-alias]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + resp = expectThrows( ResponseException.class, () -> runESQLCommand("metadata1_read2", "ROW x = 10.0 | EVAL value = x | LOOKUP JOIN `lookup-user1` ON value | KEEP x") @@ -612,6 +622,20 @@ public void testLookupJoinIndexForbidden() throws Exception { assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); } + public void testFromLookupIndexForbidden() throws Exception { + var resp = expectThrows(ResponseException.class, () -> runESQLCommand("metadata1_read2", "FROM 
lookup-user1")); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + + resp = expectThrows(ResponseException.class, () -> runESQLCommand("metadata1_read2", "FROM lookup-first-alias")); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-first-alias]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + + resp = expectThrows(ResponseException.class, () -> runESQLCommand("alias_user1", "FROM lookup-user1")); + assertThat(resp.getMessage(), containsString("Unknown index [lookup-user1]")); + assertThat(resp.getResponse().getStatusLine().getStatusCode(), equalTo(HttpStatus.SC_BAD_REQUEST)); + } + private void createEnrichPolicy() throws Exception { createIndex("songs", Settings.EMPTY, """ "properties":{"song_id": {"type": "keyword"}, "title": {"type": "keyword"}, "artist": {"type": "keyword"} } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java index 7e25fb29fdb78..f3b2ea0d864ff 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/EsqlTestUtils.java @@ -44,7 +44,6 @@ import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -55,6 +54,7 @@ import 
org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.Range; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; @@ -215,7 +215,7 @@ public static Range rangeOf(Expression value, Expression lower, boolean includeL } public static EsRelation relation() { - return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), IndexMode.STANDARD, randomBoolean()); + return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), IndexMode.STANDARD); } /** diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index bb46c9d31f745..e5cab8de8092b 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -1452,6 +1452,7 @@ cnt:long ; implicit casting strings to dates for IN operator +required_capability: string_literal_auto_casting FROM employees | WHERE birth_date IN ("1953-04-20", "1958-10-31") | KEEP emp_no, first_name; @@ -1462,7 +1463,7 @@ emp_no:integer | first_name:keyword 10025 | Prasadram ; -IN operator with null in list, finds match +IN operator with null in list, finds match#[skip:-8.13.99, reason: default date formatter is changed in 8.14] FROM employees | EVAL x = NULL @@ -1473,7 +1474,7 @@ birth_date:datetime | first_name:keyword 1958-02-19T00:00:00.000Z | Saniya ; -IN operator with null in list, doesn't find match +IN operator with null in list, doesn't find match#[skip:-8.13.99, reason: default 
date formatter is changed in 8.14] FROM employees | EVAL x = NULL @@ -1483,7 +1484,7 @@ FROM employees birth_date:datetime | first_name:keyword ; -IN operator with null in list, doesn't find match, EVAL to check value +IN operator with null in list, doesn't find match, EVAL to check value#[skip:-8.13.99, reason: default date formatter is changed in 8.14] FROM employees | EVAL x = NULL diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec index f44653171a4f5..b2a063e509a85 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date_nanos.csv-spec @@ -748,6 +748,7 @@ ct:long | mo:date_nanos Bucket Date nanos by 10 minutes required_capability: date_trunc_date_nanos required_capability: date_nanos_bucket +required_capability: string_literal_auto_casting FROM date_nanos | WHERE millis > "2020-01-01" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec index 8d24ddb45602b..c8203042a23de 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/lookup-join.csv-spec @@ -206,10 +206,7 @@ language_code:integer | language_name:keyword 4 | German ; -dropAllLookedUpFieldsOnTheDataNode-Ignore -// Depends on -// https://github.com/elastic/elasticsearch/issues/118778 -// https://github.com/elastic/elasticsearch/issues/118781 +keepFieldNotInLookup required_capability: join_lookup_v11 FROM employees @@ -217,7 +214,7 @@ FROM employees | LOOKUP JOIN languages_lookup_non_unique_key ON language_code | WHERE emp_no == 10001 | SORT emp_no -| DROP language* +| keep emp_no ; emp_no:integer @@ -227,18 +224,15 @@ emp_no:integer 10001 ; -dropAllLookedUpFieldsOnTheCoordinator-Ignore -// Depends on 
-// https://github.com/elastic/elasticsearch/issues/118778 -// https://github.com/elastic/elasticsearch/issues/118781 +dropAllFieldsUsedInLookup required_capability: join_lookup_v11 FROM employees -| SORT emp_no -| LIMIT 2 +| WHERE emp_no == 10001 +| keep emp_no | EVAL language_code = emp_no % 10 | LOOKUP JOIN languages_lookup_non_unique_key ON language_code -| DROP language* +| DROP language_*, country* ; emp_no:integer @@ -246,9 +240,6 @@ emp_no:integer 10001 10001 10001 -10002 -10002 -10002 ; ############################################### diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec new file mode 100644 index 0000000000000..37e493c55835a --- /dev/null +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/map-functions.csv-spec @@ -0,0 +1,122 @@ +// Tests to validate maps as inputs to functions, these functions are under snapshot only + +logWithBaseInMapEval +required_capability: optional_named_argument_map_for_function +ROW value = 8.0 +| EVAL l = log_with_base_in_map(value, {"base":2.0}) +; + +value: double |l:double +8.0 |3.0 +; + +logWithOptionalMapMissingEval +required_capability: optional_named_argument_map_for_function +ROW value = 8.0 +| EVAL l = round(log_with_base_in_map(value)) +; + +value: double |l:double +8.0 |2.0 +; + + +logWithBaseInMapEvalIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) +| EVAL l = log_with_base_in_map(languages, {"base":2.0}) +| KEEP emp_no, languages, l +| SORT emp_no +; + +emp_no:integer |languages:integer |l:double +10001 |2 |1.0 +10003 |4 |2.0 +; + +logWithOptionalMapMissingEvalIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) +| EVAL l = round(log_with_base_in_map(languages)) +| KEEP emp_no, languages, l +| SORT emp_no +; + +emp_no:integer |languages:integer 
|l:double +10001 |2 |1.0 +10003 |4 |1.0 +; + +logWithBaseInMapWhereTrueIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages, {"base":2.0}) > 1 +| KEEP emp_no, languages +| SORT emp_no +; + +emp_no:integer |languages:integer +10003 |4 +; + +logWithOptionalMapMissingWhereTrueIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages) > 1 +| KEEP emp_no, languages +| SORT emp_no +; + +emp_no:integer |languages:integer +10003 |4 +; + +logWithBaseInMapWhereFalseIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages, {"base":2.0}) < 0 +| KEEP emp_no, languages +| SORT emp_no +; + +emp_no:integer |languages:integer +; + +logWithOptionalMapMissingWhereFalseIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) AND log_with_base_in_map(languages) < 0 +| KEEP emp_no, languages +| SORT emp_no +; + +emp_no:integer |languages:integer +; + +logWithBaseInMapSortIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) +| SORT log_with_base_in_map(languages, {"base":2.0}) desc +| KEEP emp_no +; + +emp_no:integer +10003 +10001 +; + +logWithOptionalMapMissingSortIndex +required_capability: optional_named_argument_map_for_function +FROM employees +| WHERE emp_no IN (10001, 10003) +| SORT log_with_base_in_map(languages) desc +| KEEP emp_no +; + +emp_no:integer +10003 +10001 +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index 2fe2feb3bc219..b2b4f15860484 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -771,6 +771,28 @@ ul:ul 18446744073709551615 ; +roundMaxULWithBigNegativeDecimals +required_capability: fn_round_ul_fixes +row + ul1 = round(18446744073709551615, -6144415263046370459::long), + ul2 = round(18446744073709551615, -20::long), + ul3 = round(12446744073709551615, -19::long); + +ul1:ul | ul2:ul | ul3:ul +0 | 0 | 10000000000000000000 +; + +roundBigULWithRoundULOverflow +required_capability: fn_round_ul_fixes +row ul = round(18446744073709551615, -19::long); + +warning:Line 1:10: evaluation of [round(18446744073709551615, -19::long)] failed, treating result as null. Only first 20 failures recorded. +warning:Line 1:10: java.lang.ArithmeticException: unsigned_long overflow + +ul:ul +null +; + mvAvg from employees | where emp_no > 10008 | eval salary_change = mv_avg(salary_change) | sort emp_no | keep emp_no, salary_change.int, salary_change | limit 7; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec index b4cd18f728858..a213c378d33d8 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/metadata.csv-spec @@ -39,6 +39,26 @@ max:integer |_index:keyword // end::metaIndexInAggs-result[] ; +metaIndexSorted +required_capability: metadata_fields +from employees metadata _index | sort _index, emp_no desc | keep emp_no, _index | limit 2; + + +emp_no:integer |_index:keyword +10100 |employees +10099 |employees +; + +metaIndexWithInPredicate +required_capability: metadata_fields +from employees metadata _index | where _index in ("employees", "foobar") | sort emp_no desc | keep emp_no, _index | limit 2; + + +emp_no:integer |_index:keyword +10100 |employees +10099 |employees +; + metaIndexAliasedInAggs required_capability: metadata_fields from employees metadata _index | eval _i = _index | stats max = 
max(emp_no) by _i; @@ -64,6 +84,26 @@ min:integer |_v:long 10001 |1 ; +metaVersionSorted +required_capability: metadata_fields +from employees metadata _version | sort _version, emp_no desc | keep emp_no, _version | limit 2; + + +emp_no:integer |_version:long +10100 |1 +10099 |1 +; + +metaVersionWithInPredicate +required_capability: metadata_fields +from employees metadata _version | where _version in (1, 2, 3) | sort emp_no desc | keep emp_no, _version | limit 2; + + +emp_no:integer |_version:long +10100 |1 +10099 |1 +; + inAggsAndAsGroups required_capability: metadata_fields from employees metadata _index, _version | stats max = max(_version) by _index; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java index e2e635917ed1c..862adadd1f105 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -15,6 +15,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.plugins.Plugin; @@ -23,7 +24,6 @@ import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; import org.elasticsearch.xpack.core.async.TransportDeleteAsyncResultAction; import org.elasticsearch.xpack.core.esql.action.ColumnInfo; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import java.nio.file.Path; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 index 
ef875d7ca01d8..72fb491cdd982 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 @@ -216,6 +216,9 @@ ASTERISK : '*'; SLASH : '/'; PERCENT : '%'; +LEFT_BRACES : {this.isDevVersion()}? '{'; +RIGHT_BRACES : {this.isDevVersion()}? '}'; + NESTED_WHERE : WHERE -> type(WHERE); NAMED_OR_POSITIONAL_PARAM diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens index b1a16987dd8ce..366b455f16402 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens @@ -66,66 +66,68 @@ MINUS=65 ASTERISK=66 SLASH=67 PERCENT=68 -NAMED_OR_POSITIONAL_PARAM=69 -OPENING_BRACKET=70 -CLOSING_BRACKET=71 -UNQUOTED_IDENTIFIER=72 -QUOTED_IDENTIFIER=73 -EXPR_LINE_COMMENT=74 -EXPR_MULTILINE_COMMENT=75 -EXPR_WS=76 -EXPLAIN_WS=77 -EXPLAIN_LINE_COMMENT=78 -EXPLAIN_MULTILINE_COMMENT=79 -METADATA=80 -UNQUOTED_SOURCE=81 -FROM_LINE_COMMENT=82 -FROM_MULTILINE_COMMENT=83 -FROM_WS=84 -ID_PATTERN=85 -PROJECT_LINE_COMMENT=86 -PROJECT_MULTILINE_COMMENT=87 -PROJECT_WS=88 -AS=89 -RENAME_LINE_COMMENT=90 -RENAME_MULTILINE_COMMENT=91 -RENAME_WS=92 -ON=93 -WITH=94 -ENRICH_POLICY_NAME=95 -ENRICH_LINE_COMMENT=96 -ENRICH_MULTILINE_COMMENT=97 -ENRICH_WS=98 -ENRICH_FIELD_LINE_COMMENT=99 -ENRICH_FIELD_MULTILINE_COMMENT=100 -ENRICH_FIELD_WS=101 -MVEXPAND_LINE_COMMENT=102 -MVEXPAND_MULTILINE_COMMENT=103 -MVEXPAND_WS=104 -INFO=105 -SHOW_LINE_COMMENT=106 -SHOW_MULTILINE_COMMENT=107 -SHOW_WS=108 -SETTING=109 -SETTING_LINE_COMMENT=110 -SETTTING_MULTILINE_COMMENT=111 -SETTING_WS=112 -LOOKUP_LINE_COMMENT=113 -LOOKUP_MULTILINE_COMMENT=114 -LOOKUP_WS=115 -LOOKUP_FIELD_LINE_COMMENT=116 -LOOKUP_FIELD_MULTILINE_COMMENT=117 -LOOKUP_FIELD_WS=118 -USING=119 -JOIN_LINE_COMMENT=120 -JOIN_MULTILINE_COMMENT=121 -JOIN_WS=122 -METRICS_LINE_COMMENT=123 -METRICS_MULTILINE_COMMENT=124 -METRICS_WS=125 -CLOSING_METRICS_LINE_COMMENT=126 
-CLOSING_METRICS_MULTILINE_COMMENT=127 -CLOSING_METRICS_WS=128 +LEFT_BRACES=69 +RIGHT_BRACES=70 +NAMED_OR_POSITIONAL_PARAM=71 +OPENING_BRACKET=72 +CLOSING_BRACKET=73 +UNQUOTED_IDENTIFIER=74 +QUOTED_IDENTIFIER=75 +EXPR_LINE_COMMENT=76 +EXPR_MULTILINE_COMMENT=77 +EXPR_WS=78 +EXPLAIN_WS=79 +EXPLAIN_LINE_COMMENT=80 +EXPLAIN_MULTILINE_COMMENT=81 +METADATA=82 +UNQUOTED_SOURCE=83 +FROM_LINE_COMMENT=84 +FROM_MULTILINE_COMMENT=85 +FROM_WS=86 +ID_PATTERN=87 +PROJECT_LINE_COMMENT=88 +PROJECT_MULTILINE_COMMENT=89 +PROJECT_WS=90 +AS=91 +RENAME_LINE_COMMENT=92 +RENAME_MULTILINE_COMMENT=93 +RENAME_WS=94 +ON=95 +WITH=96 +ENRICH_POLICY_NAME=97 +ENRICH_LINE_COMMENT=98 +ENRICH_MULTILINE_COMMENT=99 +ENRICH_WS=100 +ENRICH_FIELD_LINE_COMMENT=101 +ENRICH_FIELD_MULTILINE_COMMENT=102 +ENRICH_FIELD_WS=103 +MVEXPAND_LINE_COMMENT=104 +MVEXPAND_MULTILINE_COMMENT=105 +MVEXPAND_WS=106 +INFO=107 +SHOW_LINE_COMMENT=108 +SHOW_MULTILINE_COMMENT=109 +SHOW_WS=110 +SETTING=111 +SETTING_LINE_COMMENT=112 +SETTTING_MULTILINE_COMMENT=113 +SETTING_WS=114 +LOOKUP_LINE_COMMENT=115 +LOOKUP_MULTILINE_COMMENT=116 +LOOKUP_WS=117 +LOOKUP_FIELD_LINE_COMMENT=118 +LOOKUP_FIELD_MULTILINE_COMMENT=119 +LOOKUP_FIELD_WS=120 +USING=121 +JOIN_LINE_COMMENT=122 +JOIN_MULTILINE_COMMENT=123 +JOIN_WS=124 +METRICS_LINE_COMMENT=125 +METRICS_MULTILINE_COMMENT=126 +METRICS_WS=127 +CLOSING_METRICS_LINE_COMMENT=128 +CLOSING_METRICS_MULTILINE_COMMENT=129 +CLOSING_METRICS_WS=130 'dissect'=1 'drop'=2 'enrich'=3 @@ -179,10 +181,10 @@ CLOSING_METRICS_WS=128 '*'=66 '/'=67 '%'=68 -']'=71 -'metadata'=80 -'as'=89 -'on'=93 -'with'=94 -'info'=105 -'USING'=119 +']'=73 +'metadata'=82 +'as'=91 +'on'=95 +'with'=96 +'info'=107 +'USING'=121 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 4d4cb7d2caac4..e12904a25b131 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -102,13 +102,21 @@ primaryExpression 
; functionExpression - : functionName LP (ASTERISK | (booleanExpression (COMMA booleanExpression)*))? RP + : functionName LP (ASTERISK | (booleanExpression (COMMA booleanExpression)* (COMMA mapExpression)?))? RP ; functionName : identifierOrParameter ; +mapExpression + : {this.isDevVersion()}? LEFT_BRACES entryExpression (COMMA entryExpression)* RIGHT_BRACES + ; + +entryExpression + : key=string COLON value=constant + ; + dataType : identifier #toDataType ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens index b1a16987dd8ce..366b455f16402 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens @@ -66,66 +66,68 @@ MINUS=65 ASTERISK=66 SLASH=67 PERCENT=68 -NAMED_OR_POSITIONAL_PARAM=69 -OPENING_BRACKET=70 -CLOSING_BRACKET=71 -UNQUOTED_IDENTIFIER=72 -QUOTED_IDENTIFIER=73 -EXPR_LINE_COMMENT=74 -EXPR_MULTILINE_COMMENT=75 -EXPR_WS=76 -EXPLAIN_WS=77 -EXPLAIN_LINE_COMMENT=78 -EXPLAIN_MULTILINE_COMMENT=79 -METADATA=80 -UNQUOTED_SOURCE=81 -FROM_LINE_COMMENT=82 -FROM_MULTILINE_COMMENT=83 -FROM_WS=84 -ID_PATTERN=85 -PROJECT_LINE_COMMENT=86 -PROJECT_MULTILINE_COMMENT=87 -PROJECT_WS=88 -AS=89 -RENAME_LINE_COMMENT=90 -RENAME_MULTILINE_COMMENT=91 -RENAME_WS=92 -ON=93 -WITH=94 -ENRICH_POLICY_NAME=95 -ENRICH_LINE_COMMENT=96 -ENRICH_MULTILINE_COMMENT=97 -ENRICH_WS=98 -ENRICH_FIELD_LINE_COMMENT=99 -ENRICH_FIELD_MULTILINE_COMMENT=100 -ENRICH_FIELD_WS=101 -MVEXPAND_LINE_COMMENT=102 -MVEXPAND_MULTILINE_COMMENT=103 -MVEXPAND_WS=104 -INFO=105 -SHOW_LINE_COMMENT=106 -SHOW_MULTILINE_COMMENT=107 -SHOW_WS=108 -SETTING=109 -SETTING_LINE_COMMENT=110 -SETTTING_MULTILINE_COMMENT=111 -SETTING_WS=112 -LOOKUP_LINE_COMMENT=113 -LOOKUP_MULTILINE_COMMENT=114 -LOOKUP_WS=115 -LOOKUP_FIELD_LINE_COMMENT=116 -LOOKUP_FIELD_MULTILINE_COMMENT=117 -LOOKUP_FIELD_WS=118 -USING=119 -JOIN_LINE_COMMENT=120 -JOIN_MULTILINE_COMMENT=121 -JOIN_WS=122 
-METRICS_LINE_COMMENT=123 -METRICS_MULTILINE_COMMENT=124 -METRICS_WS=125 -CLOSING_METRICS_LINE_COMMENT=126 -CLOSING_METRICS_MULTILINE_COMMENT=127 -CLOSING_METRICS_WS=128 +LEFT_BRACES=69 +RIGHT_BRACES=70 +NAMED_OR_POSITIONAL_PARAM=71 +OPENING_BRACKET=72 +CLOSING_BRACKET=73 +UNQUOTED_IDENTIFIER=74 +QUOTED_IDENTIFIER=75 +EXPR_LINE_COMMENT=76 +EXPR_MULTILINE_COMMENT=77 +EXPR_WS=78 +EXPLAIN_WS=79 +EXPLAIN_LINE_COMMENT=80 +EXPLAIN_MULTILINE_COMMENT=81 +METADATA=82 +UNQUOTED_SOURCE=83 +FROM_LINE_COMMENT=84 +FROM_MULTILINE_COMMENT=85 +FROM_WS=86 +ID_PATTERN=87 +PROJECT_LINE_COMMENT=88 +PROJECT_MULTILINE_COMMENT=89 +PROJECT_WS=90 +AS=91 +RENAME_LINE_COMMENT=92 +RENAME_MULTILINE_COMMENT=93 +RENAME_WS=94 +ON=95 +WITH=96 +ENRICH_POLICY_NAME=97 +ENRICH_LINE_COMMENT=98 +ENRICH_MULTILINE_COMMENT=99 +ENRICH_WS=100 +ENRICH_FIELD_LINE_COMMENT=101 +ENRICH_FIELD_MULTILINE_COMMENT=102 +ENRICH_FIELD_WS=103 +MVEXPAND_LINE_COMMENT=104 +MVEXPAND_MULTILINE_COMMENT=105 +MVEXPAND_WS=106 +INFO=107 +SHOW_LINE_COMMENT=108 +SHOW_MULTILINE_COMMENT=109 +SHOW_WS=110 +SETTING=111 +SETTING_LINE_COMMENT=112 +SETTTING_MULTILINE_COMMENT=113 +SETTING_WS=114 +LOOKUP_LINE_COMMENT=115 +LOOKUP_MULTILINE_COMMENT=116 +LOOKUP_WS=117 +LOOKUP_FIELD_LINE_COMMENT=118 +LOOKUP_FIELD_MULTILINE_COMMENT=119 +LOOKUP_FIELD_WS=120 +USING=121 +JOIN_LINE_COMMENT=122 +JOIN_MULTILINE_COMMENT=123 +JOIN_WS=124 +METRICS_LINE_COMMENT=125 +METRICS_MULTILINE_COMMENT=126 +METRICS_WS=127 +CLOSING_METRICS_LINE_COMMENT=128 +CLOSING_METRICS_MULTILINE_COMMENT=129 +CLOSING_METRICS_WS=130 'dissect'=1 'drop'=2 'enrich'=3 @@ -179,10 +181,10 @@ CLOSING_METRICS_WS=128 '*'=66 '/'=67 '%'=68 -']'=71 -'metadata'=80 -'as'=89 -'on'=93 -'with'=94 -'info'=105 -'USING'=119 +']'=73 +'metadata'=82 +'as'=91 +'on'=95 +'with'=96 +'info'=107 +'USING'=121 diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java 
b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java new file mode 100644 index 0000000000000..11c28c2a1f692 --- /dev/null +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMapEvaluator.java @@ -0,0 +1,139 @@ +// Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one +// or more contributor license agreements. Licensed under the Elastic License +// 2.0; you may not use this file except in compliance with the Elastic License +// 2.0. +package org.elasticsearch.xpack.esql.expression.function.scalar.map; + +import java.lang.ArithmeticException; +import java.lang.IllegalArgumentException; +import java.lang.Override; +import java.lang.String; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.operator.Warnings; +import org.elasticsearch.core.Releasables; +import org.elasticsearch.xpack.esql.core.tree.Source; + +/** + * {@link EvalOperator.ExpressionEvaluator} implementation for {@link LogWithBaseInMap}. + * This class is generated. Do not edit it. 
+ */ +public final class LogWithBaseInMapEvaluator implements EvalOperator.ExpressionEvaluator { + private final Source source; + + private final EvalOperator.ExpressionEvaluator value; + + private final double base; + + private final DriverContext driverContext; + + private Warnings warnings; + + public LogWithBaseInMapEvaluator(Source source, EvalOperator.ExpressionEvaluator value, + double base, DriverContext driverContext) { + this.source = source; + this.value = value; + this.base = base; + this.driverContext = driverContext; + } + + @Override + public Block eval(Page page) { + try (DoubleBlock valueBlock = (DoubleBlock) value.eval(page)) { + DoubleVector valueVector = valueBlock.asVector(); + if (valueVector == null) { + return eval(page.getPositionCount(), valueBlock); + } + return eval(page.getPositionCount(), valueVector); + } + } + + public DoubleBlock eval(int positionCount, DoubleBlock valueBlock) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + if (valueBlock.isNull(p)) { + result.appendNull(); + continue position; + } + if (valueBlock.getValueCount(p) != 1) { + if (valueBlock.getValueCount(p) > 1) { + warnings().registerException(new IllegalArgumentException("single-value function encountered multi-value")); + } + result.appendNull(); + continue position; + } + try { + result.appendDouble(LogWithBaseInMap.process(valueBlock.getDouble(valueBlock.getFirstValueIndex(p)), this.base)); + } catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + public DoubleBlock eval(int positionCount, DoubleVector valueVector) { + try(DoubleBlock.Builder result = driverContext.blockFactory().newDoubleBlockBuilder(positionCount)) { + position: for (int p = 0; p < positionCount; p++) { + try { + result.appendDouble(LogWithBaseInMap.process(valueVector.getDouble(p), this.base)); + } 
catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } + } + return result.build(); + } + } + + @Override + public String toString() { + return "LogWithBaseInMapEvaluator[" + "value=" + value + ", base=" + base + "]"; + } + + @Override + public void close() { + Releasables.closeExpectNoException(value); + } + + private Warnings warnings() { + if (warnings == null) { + this.warnings = Warnings.createWarnings( + driverContext.warningsMode(), + source.source().getLineNumber(), + source.source().getColumnNumber(), + source.text() + ); + } + return warnings; + } + + static class Factory implements EvalOperator.ExpressionEvaluator.Factory { + private final Source source; + + private final EvalOperator.ExpressionEvaluator.Factory value; + + private final double base; + + public Factory(Source source, EvalOperator.ExpressionEvaluator.Factory value, double base) { + this.source = source; + this.value = value; + this.base = base; + } + + @Override + public LogWithBaseInMapEvaluator get(DriverContext context) { + return new LogWithBaseInMapEvaluator(source, value.get(context), base, context); + } + + @Override + public String toString() { + return "LogWithBaseInMapEvaluator[" + "value=" + value + ", base=" + base + "]"; + } + } +} diff --git a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java index c9c6e3806cb04..9cc233b8aff0c 100644 --- a/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java +++ b/x-pack/plugin/esql/src/main/generated/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundUnsignedLongEvaluator.java @@ -4,6 +4,7 @@ // 2.0. 
package org.elasticsearch.xpack.esql.expression.function.scalar.math; +import java.lang.ArithmeticException; import java.lang.IllegalArgumentException; import java.lang.Override; import java.lang.String; @@ -52,7 +53,7 @@ public Block eval(Page page) { if (decimalsVector == null) { return eval(page.getPositionCount(), valBlock, decimalsBlock); } - return eval(page.getPositionCount(), valVector, decimalsVector).asBlock(); + return eval(page.getPositionCount(), valVector, decimalsVector); } } } @@ -82,16 +83,26 @@ public LongBlock eval(int positionCount, LongBlock valBlock, LongBlock decimalsB result.appendNull(); continue position; } - result.appendLong(Round.processUnsignedLong(valBlock.getLong(valBlock.getFirstValueIndex(p)), decimalsBlock.getLong(decimalsBlock.getFirstValueIndex(p)))); + try { + result.appendLong(Round.processUnsignedLong(valBlock.getLong(valBlock.getFirstValueIndex(p)), decimalsBlock.getLong(decimalsBlock.getFirstValueIndex(p)))); + } catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } } return result.build(); } } - public LongVector eval(int positionCount, LongVector valVector, LongVector decimalsVector) { - try(LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(positionCount)) { + public LongBlock eval(int positionCount, LongVector valVector, LongVector decimalsVector) { + try(LongBlock.Builder result = driverContext.blockFactory().newLongBlockBuilder(positionCount)) { position: for (int p = 0; p < positionCount; p++) { - result.appendLong(p, Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); + try { + result.appendLong(Round.processUnsignedLong(valVector.getLong(p), decimalsVector.getLong(p))); + } catch (ArithmeticException e) { + warnings().registerException(e); + result.appendNull(); + } } return result.build(); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 00d239ac9ac1e..00c07d3cbb3e8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -192,6 +192,11 @@ public enum Cap { */ FN_SUBSTRING_EMPTY_NULL, + /** + * Fixes on function {@code ROUND} that avoid it throwing exceptions on runtime for unsigned long cases. + */ + FN_ROUND_UL_FIXES, + /** * All functions that take TEXT should never emit TEXT, only KEYWORD. #114334 */ @@ -703,7 +708,7 @@ public enum Cap { /** * Support the "METADATA _score" directive to enable _score column. */ - METADATA_SCORE(Build.current().isSnapshot()), + METADATA_SCORE, /** * Term function @@ -728,7 +733,12 @@ public enum Cap { /** * Change field caps response for semantic_text fields to be reported as text */ - SEMANTIC_TEXT_FIELD_CAPS; + SEMANTIC_TEXT_FIELD_CAPS, + + /** + * Support named argument for function in map format. + */ + OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION(Build.current().isSnapshot()); private final boolean enabled; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java index a11b511cb83b7..fc1b7f6329ab3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java @@ -269,7 +269,13 @@ private LogicalPlan resolveIndex(UnresolvedRelation plan, IndexResolution indexR } var attributes = mappingAsAttributes(plan.source(), esIndex.mapping()); attributes.addAll(plan.metadataFields()); - return new EsRelation(plan.source(), esIndex, attributes.isEmpty() ? 
NO_FIELDS : attributes, plan.indexMode()); + return new EsRelation( + plan.source(), + esIndex.name(), + plan.indexMode(), + esIndex.indexNameWithModes(), + attributes.isEmpty() ? NO_FIELDS : attributes + ); } } @@ -1371,9 +1377,13 @@ private LogicalPlan doRule(LogicalPlan plan) { } if (missing.isEmpty() == false) { - List newOutput = new ArrayList<>(esr.output()); - newOutput.addAll(missing); - return new EsRelation(esr.source(), esr.index(), newOutput, esr.indexMode(), esr.frozen()); + return new EsRelation( + esr.source(), + esr.indexPattern(), + esr.indexMode(), + esr.indexNameWithModes(), + CollectionUtils.combine(esr.output(), missing) + ); } return esr; }); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java new file mode 100644 index 0000000000000..8ef528b6668ab --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/capabilities/TranslationAware.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.capabilities; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; + +/** + * Expressions implementing this interface can get called on data nodes to provide an Elasticsearch/Lucene query. + */ +public interface TranslationAware { + /** + * Indicates whether the expression can be translated or not. + * Usually checks whether the expression arguments are actual fields that exist in Lucene. 
+ */ + boolean translatable(LucenePushdownPredicates pushdownPredicates); + + /** + * Translates the implementing expression into a Query. + * If during translation a child needs to be translated first, the handler needs to be used even if the child implements this + * interface as well. This is to ensure that the child is wrapped in a SingleValueQuery if necessary. + *

    So use this:

    + *

    {@code Query childQuery = handler.asQuery(child);}

    + *

    and not this:

    + *

    {@code Query childQuery = child.asQuery(handler);}

    + */ + Query asQuery(TranslatorHandler handler); + + /** + * Subinterface for expressions that can only process single values (and null out on MVs). + */ + interface SingleValueTranslationAware extends TranslationAware { + /** + * Returns the field that only supports single-value semantics. + */ + Expression singleValueField(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java index a486d574ddd84..5b630c1a83874 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/AbstractLookupService.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.CheckedBiFunction; +import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; @@ -129,10 +130,10 @@ * the same number of rows that it was sent no matter how many documents match. *

    */ -abstract class AbstractLookupService { +public abstract class AbstractLookupService { private final String actionName; private final ClusterService clusterService; - private final SearchService searchService; + private final CreateShardContext createShardContext; private final TransportService transportService; private final Executor executor; private final BigArrays bigArrays; @@ -151,7 +152,7 @@ abstract class AbstractLookupService final List releasables = new ArrayList<>(6); boolean started = false; try { - final ShardSearchRequest shardSearchRequest = new ShardSearchRequest(request.shardId, 0, AliasFilter.EMPTY); - final SearchContext searchContext = searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT); - releasables.add(searchContext); + LookupShardContext shardContext = createShardContext.create(request.shardId); + releasables.add(shardContext.release); final LocalCircuitBreaker localBreaker = new LocalCircuitBreaker( blockFactory.breaker(), localBreakerSettings.overReservedBytes(), @@ -363,8 +363,7 @@ private void doLookup(T request, CancellableTask task, ActionListener } } releasables.add(finishPages); - SearchExecutionContext searchExecutionContext = searchContext.getSearchExecutionContext(); - QueryList queryList = queryList(request, searchExecutionContext, inputBlock, request.inputDataType); + QueryList queryList = queryList(request, shardContext.executionContext, inputBlock, request.inputDataType); var warnings = Warnings.createWarnings( DriverContext.WarningsMode.COLLECT, request.source.source().getLineNumber(), @@ -375,11 +374,11 @@ private void doLookup(T request, CancellableTask task, ActionListener driverContext.blockFactory(), EnrichQuerySourceOperator.DEFAULT_MAX_PAGE_SIZE, queryList, - searchExecutionContext.getIndexReader(), + shardContext.context.searcher().getIndexReader(), warnings ); releasables.add(queryOperator); - var extractFieldsOperator = extractFieldsOperator(searchContext, driverContext, 
request.extractFields); + var extractFieldsOperator = extractFieldsOperator(shardContext.context, driverContext, request.extractFields); releasables.add(extractFieldsOperator); /* @@ -402,20 +401,32 @@ private void doLookup(T request, CancellableTask task, ActionListener List.of(extractFieldsOperator, finishPages), outputOperator, Driver.DEFAULT_STATUS_INTERVAL, - Releasables.wrap(searchContext, localBreaker) + Releasables.wrap(shardContext.release, localBreaker) ); task.addListener(() -> { String reason = Objects.requireNonNullElse(task.getReasonCancelled(), "task was cancelled"); driver.cancel(reason); }); var threadContext = transportService.getThreadPool().getThreadContext(); - Driver.start(threadContext, executor, driver, Driver.DEFAULT_MAX_ITERATIONS, listener.map(ignored -> { - List out = collectedPages; - if (mergePages && out.isEmpty()) { - out = List.of(createNullResponse(request.inputPage.getPositionCount(), request.extractFields)); + Driver.start(threadContext, executor, driver, Driver.DEFAULT_MAX_ITERATIONS, new ActionListener() { + @Override + public void onResponse(Void unused) { + List out = collectedPages; + if (mergePages && out.isEmpty()) { + out = List.of(createNullResponse(request.inputPage.getPositionCount(), request.extractFields)); + } + listener.onResponse(out); + } + + @Override + public void onFailure(Exception e) { + Releasables.closeExpectNoException(Releasables.wrap(() -> Iterators.map(collectedPages.iterator(), p -> () -> { + p.allowPassingToDifferentDriver(); + p.releaseBlocks(); + }))); + listener.onFailure(e); } - return out; - })); + }); started = true; } catch (Exception e) { listener.onFailure(e); @@ -427,15 +438,10 @@ private void doLookup(T request, CancellableTask task, ActionListener } private static Operator extractFieldsOperator( - SearchContext searchContext, + EsPhysicalOperationProviders.ShardContext shardContext, DriverContext driverContext, List extractFields ) { - EsPhysicalOperationProviders.ShardContext 
shardContext = new EsPhysicalOperationProviders.DefaultShardContext( - 0, - searchContext.getSearchExecutionContext(), - searchContext.request().getAliasFilter() - ); List fields = new ArrayList<>(extractFields.size()); for (NamedExpression extractField : extractFields) { BlockLoader loader = shardContext.blockLoader( @@ -459,7 +465,7 @@ private static Operator extractFieldsOperator( return new ValuesSourceReaderOperator( driverContext.blockFactory(), fields, - List.of(new ValuesSourceReaderOperator.ShardContext(searchContext.searcher().getIndexReader(), searchContext::newSourceLoader)), + List.of(new ValuesSourceReaderOperator.ShardContext(shardContext.searcher().getIndexReader(), shardContext::newSourceLoader)), 0 ); } @@ -667,4 +673,42 @@ public boolean hasReferences() { return refs.hasReferences(); } } + + /** + * Create a {@link LookupShardContext} for a locally allocated {@link ShardId}. + */ + public interface CreateShardContext { + LookupShardContext create(ShardId shardId) throws IOException; + + static CreateShardContext fromSearchService(SearchService searchService) { + return shardId -> { + ShardSearchRequest shardSearchRequest = new ShardSearchRequest(shardId, 0, AliasFilter.EMPTY); + return LookupShardContext.fromSearchContext( + searchService.createSearchContext(shardSearchRequest, SearchService.NO_TIMEOUT) + ); + }; + } + } + + /** + * {@link AbstractLookupService} uses this to power the queries and field loading that + * it needs to perform to actually do the lookup. 
+ */ + public record LookupShardContext( + EsPhysicalOperationProviders.ShardContext context, + SearchExecutionContext executionContext, + Releasable release + ) { + public static LookupShardContext fromSearchContext(SearchContext context) { + return new LookupShardContext( + new EsPhysicalOperationProviders.DefaultShardContext( + 0, + context.getSearchExecutionContext(), + context.request().getAliasFilter() + ), + context.getSearchExecutionContext(), + context + ); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java index e3d962fa9231b..f9ad74b71bef5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java @@ -23,7 +23,6 @@ import org.elasticsearch.index.mapper.RangeType; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.search.SearchService; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.security.authz.privilege.ClusterPrivilegeResolver; @@ -48,7 +47,7 @@ public class EnrichLookupService extends AbstractLookupService unaryScalars() { entries.add(IsNull.ENTRY); entries.add(Length.ENTRY); entries.add(Log10.ENTRY); + entries.add(LogWithBaseInMap.ENTRY); entries.add(LTrim.ENTRY); entries.add(Neg.ENTRY); entries.add(Not.ENTRY); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java index bb170f546b54e..3b1e4828f0707 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/EsqlFunctionRegistry.java @@ -70,6 +70,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.date.Now; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.IpPrefix; +import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Abs; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Acos; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Asin; @@ -437,6 +438,9 @@ private static FunctionDefinition[][] snapshotFunctions() { // The delay() function is for debug/snapshot environments only and should never be enabled in a non-snapshot build. // This is an experimental function and can be removed without notice. def(Delay.class, Delay::new, "delay"), + // log_with_base_in_map is for debug/snapshot environments only + // and should never be enabled in a non-snapshot build. They are for the purpose of testing MapExpression only. 
+ def(LogWithBaseInMap.class, LogWithBaseInMap::new, "log_with_base_in_map"), def(Rate.class, Rate::withUnresolvedTimestamp, "rate"), def(Term.class, bi(Term::new), "term") } }; } @@ -468,12 +472,53 @@ public static String normalizeName(String name) { return name.toLowerCase(Locale.ROOT); } - public record ArgSignature(String name, String[] type, String description, boolean optional, DataType targetDataType) { + public static class ArgSignature { + protected final String name; + protected final String[] type; + protected final String description; + protected final boolean optional; + protected final DataType targetDataType; + + public ArgSignature(String name, String[] type, String description, boolean optional, DataType targetDataType) { + this.name = name; + this.type = type; + this.description = description; + this.optional = optional; + this.targetDataType = targetDataType; + } public ArgSignature(String name, String[] type, String description, boolean optional) { this(name, type, description, optional, UNSUPPORTED); } + public String name() { + return name; + } + + public String[] type() { + return type; + } + + public String description() { + return description; + } + + public boolean optional() { + return optional; + } + + public DataType targetDataType() { + return targetDataType; + } + + public Map mapParams() { + return Map.of(); + } + + public boolean mapArg() { + return false; + } + @Override public String toString() { return "ArgSignature{" @@ -487,7 +532,45 @@ public String toString() { + optional + ", targetDataType=" + targetDataType - + '}'; + + "}}"; + } + } + + public static class MapArgSignature extends ArgSignature { + private final Map mapParams; + + public MapArgSignature(String description, boolean optional, Map mapParams) { + super("map", new String[] { "map" }, description, optional); + this.mapParams = mapParams; + } + + @Override + public Map mapParams() { + return mapParams; + } + + @Override + public boolean mapArg() { + return true; + 
} + + @Override + public String toString() { + return "MapArgSignature{" + + "name='map', type='map', description='" + + description + + "', optional=" + + optional + + ", targetDataType=unsupported, mapParams={" + + mapParams.values().stream().map(mapArg -> "{" + mapArg + "}").collect(Collectors.joining(", ")) + + "}}"; + } + } + + public record MapEntryArgSignature(String name, String valueHint, String type, String description) { + @Override + public String toString() { + return "name='" + name + "', values=" + valueHint + ", description='" + description + "'"; } } @@ -549,22 +632,47 @@ public static FunctionDescription description(FunctionDefinition def) { List args = new ArrayList<>(params.length); boolean variadic = false; - boolean isAggregation = functionInfo == null ? false : functionInfo.isAggregation(); + boolean isAggregation = functionInfo != null && functionInfo.isAggregation(); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { - Param paramInfo = params[i].getAnnotation(Param.class); - String name = paramInfo == null ? params[i].getName() : paramInfo.name(); variadic |= List.class.isAssignableFrom(params[i].getType()); - String[] type = paramInfo == null ? new String[] { "?" } : removeUnderConstruction(paramInfo.type()); - String desc = paramInfo == null ? "" : paramInfo.description().replace('\n', ' '); - boolean optional = paramInfo == null ? false : paramInfo.optional(); - DataType targetDataType = getTargetType(type); - args.add(new EsqlFunctionRegistry.ArgSignature(name, type, desc, optional, targetDataType)); + MapParam mapParamInfo = params[i].getAnnotation(MapParam.class); // refactor this + if (mapParamInfo != null) { + args.add(mapParam(mapParamInfo)); + } else { + Param paramInfo = params[i].getAnnotation(Param.class); + args.add(paramInfo != null ? 
param(paramInfo) : paramWithoutAnnotation(params[i].getName())); + } } } return new FunctionDescription(def.name(), args, returnType, functionDescription, variadic, isAggregation); } + public static ArgSignature param(Param param) { + String[] type = removeUnderConstruction(param.type()); + String desc = param.description().replace('\n', ' '); + DataType targetDataType = getTargetType(type); + return new EsqlFunctionRegistry.ArgSignature(param.name(), type, desc, param.optional(), targetDataType); + } + + public static ArgSignature mapParam(MapParam mapParam) { + String desc = mapParam.description().replace('\n', ' '); + Map params = new HashMap<>(mapParam.params().length); + for (MapParam.MapParamEntry param : mapParam.params()) { + String valueHint = param.valueHint().length <= 1 + ? Arrays.toString(param.valueHint()) + : "[" + String.join(", ", param.valueHint()) + "]"; + String type = param.type().length <= 1 ? Arrays.toString(param.type()) : "[" + String.join(", ", param.type()) + "]"; + MapEntryArgSignature mapArg = new MapEntryArgSignature(param.name(), valueHint, type, param.description()); + params.put(param.name(), mapArg); + } + return new EsqlFunctionRegistry.MapArgSignature(desc, mapParam.optional(), params); + } + + public static ArgSignature paramWithoutAnnotation(String name) { + return new EsqlFunctionRegistry.ArgSignature(name, new String[] { "?" }, "", false, UNSUPPORTED); + } + /** * Remove types that are being actively built. */ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java new file mode 100644 index 0000000000000..87b6df1827680 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/MapParam.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function; + +import java.lang.annotation.ElementType; +import java.lang.annotation.Retention; +import java.lang.annotation.RetentionPolicy; +import java.lang.annotation.Target; + +/** + * Describes a function map argument represented by {@code MapExpression}. + * This is added to support search options used by {@code FullTextFunction}, and can be invoked like: + * FUNCTION(a, b, {"stringArg":"value", "intArg":1, "doubleArg":2.0, "boolArg":true, "arrayArg":["a", "b"]}) + * + * Using {@code Match} function as an example, the {@code Match} function takes two required arguments, + * field name and query text, it creates a {@code MatchQuery} under the cover, and the rest of the {@code MatchQuery} + * options are be grouped together and provided to the {@code Match} function as an optional argument in a map format. + */ +@Retention(RetentionPolicy.RUNTIME) +@Target(ElementType.PARAMETER) +public @interface MapParam { + MapParamEntry[] params() default {}; + + String description() default ""; + + boolean optional() default false; + + @Retention(RetentionPolicy.RUNTIME) + @Target(ElementType.PARAMETER) + @interface MapParamEntry { + String name() default ""; + + // A list of valid values/hints of this parameter, it can be a numeric, boolean, string value or an array of these values. 
+ String[] valueHint() default {}; + + String[] type() default {}; + + String description() default ""; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java index 4da7c01139c24..d2e2135a4bf83 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/FullTextFunction.java @@ -10,29 +10,29 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.capabilities.PostAnalysisPlanVerificationAware; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.common.Failures; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; -import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.TranslationAwareExpressionQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import 
org.elasticsearch.xpack.esql.core.util.Holder; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.Limit; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.plan.logical.OrderBy; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.querydsl.query.TranslationAwareExpressionQuery; import java.util.List; import java.util.Locale; @@ -158,21 +158,20 @@ public boolean equals(Object obj) { } @Override - public Query asQuery(TranslatorHandler translatorHandler) { - if (queryBuilder != null) { - return new TranslationAwareExpressionQuery(source(), queryBuilder); - } + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return true; + } - ExpressionTranslator translator = translator(); - return translator.translate(this, translatorHandler); + @Override + public Query asQuery(TranslatorHandler handler) { + return queryBuilder != null ? 
new TranslationAwareExpressionQuery(source(), queryBuilder) : translate(handler); } public QueryBuilder queryBuilder() { return queryBuilder; } - @SuppressWarnings("rawtypes") - protected abstract ExpressionTranslator translator(); + protected abstract Query translate(TranslatorHandler handler); public abstract Expression replaceQueryBuilder(QueryBuilder queryBuilder); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java index 1f7bcadd259a0..12dd7a7a5e904 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Kql.java @@ -13,18 +13,19 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; import java.io.IOException; import java.util.List; +import java.util.Objects; /** * Full text function that performs a {@link KqlQuery} . 
@@ -88,8 +89,8 @@ protected NodeInfo info() { } @Override - protected ExpressionTranslator translator() { - return new EsqlExpressionTranslators.KqlFunctionTranslator(); + protected Query translate(TranslatorHandler handler) { + return new KqlQuery(source(), Objects.toString(queryAsObject())); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java index 090766c4c90db..b7ebcda70b622 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Match.java @@ -20,23 +20,26 @@ import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; 
+import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.Set; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -273,8 +276,23 @@ public String functionType() { } @Override - protected ExpressionTranslator translator() { - return new EsqlExpressionTranslators.MatchFunctionTranslator(); + protected Query translate(TranslatorHandler handler) { + Expression fieldExpression = field; + // Field may be converted to other data type (field_name :: data_type), so we need to check the original field + if (fieldExpression instanceof AbstractConvertFunction convertFunction) { + fieldExpression = convertFunction.field(); + } + if (fieldExpression instanceof FieldAttribute fieldAttribute) { + String fieldName = fieldAttribute.name(); + if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { + // If we have multiple field types, we allow the query to be done, but getting the underlying field name + fieldName = multiTypeEsField.getName(); + } + // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided + return new MatchQuery(source(), fieldName, queryAsObject(), Map.of("lenient", "true")); + } + + throw new IllegalArgumentException("Match must have a field attribute as the first argument"); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java index ea21411d09173..285aa3201c925 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryString.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -21,10 +21,12 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.Objects; /** * Full text function that performs a {@link QueryStringQuery} . 
@@ -99,8 +101,8 @@ protected NodeInfo info() { } @Override - protected ExpressionTranslator translator() { - return new EsqlExpressionTranslators.QueryStringFunctionTranslator(); + protected Query translate(TranslatorHandler handler) { + return new QueryStringQuery(source(), Objects.toString(queryAsObject()), Map.of(), Map.of()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java index d3bb08586f49d..c96d1c46fa529 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/fulltext/Term.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; -import org.elasticsearch.xpack.esql.planner.EsqlExpressionTranslators; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import java.util.List; @@ -129,8 +129,8 @@ protected TypeResolutions.ParamOrdinal queryParamOrdinal() { } @Override - protected ExpressionTranslator translator() { - return new EsqlExpressionTranslators.TermFunctionTranslator(); + protected Query 
translate(TranslatorHandler handler) { + return new TermQuery(source(), ((FieldAttribute) field()).name(), queryAsObject()); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java index c4b9f6885e617..0d3bacbd47605 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ScalarFunctionWritables.java @@ -8,8 +8,6 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.function.grouping.GroupingWritables; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; @@ -49,6 +47,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java index 51430603a4077..aae01ab774efa 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/ip/CIDRMatch.java @@ -15,21 +15,30 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.Example; import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import java.util.Arrays; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import static java.util.Collections.singletonList; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; @@ -48,7 +57,7 @@ *

    * Example: `| eval cidr="10.0.0.0/8" | where cidr_match(ip_field, "127.0.0.1/30", cidr)` */ -public class CIDRMatch extends EsqlScalarFunction { +public class CIDRMatch extends EsqlScalarFunction implements TranslationAware.SingleValueTranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "CIDRMatch", @@ -168,4 +177,25 @@ public Expression replaceChildren(List newChildren) { protected NodeInfo info() { return NodeInfo.create(this, CIDRMatch::new, children().get(0), children().subList(1, children().size())); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableFieldAttribute(ipField) && Expressions.foldable(matches); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + var fa = LucenePushdownPredicates.checkIsFieldAttribute(ipField); + Check.isTrue(Expressions.foldable(matches), "Expected foldable matches, but got [{}]", matches); + + String targetFieldName = handler.nameOf(fa.exactAttribute()); + Set set = new LinkedHashSet<>(Expressions.fold(FoldContext.small() /* TODO remove me */, matches)); + + return new TermsQuery(source(), targetFieldName, set); + } + + @Override + public Expression singleValueField() { + return ipField; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java new file mode 100644 index 0000000000000..447bac738162b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/map/LogWithBaseInMap.java @@ -0,0 +1,221 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.map; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.compute.ann.Evaluator; +import org.elasticsearch.compute.ann.Fixed; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.core.expression.EntryExpression; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; +import org.elasticsearch.xpack.esql.core.tree.NodeInfo; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.MapParam; +import org.elasticsearch.xpack.esql.expression.function.OptionalArgument; +import org.elasticsearch.xpack.esql.expression.function.Param; +import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; +import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; + +import java.io.IOException; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isFoldable; +import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isMapExpression; +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; +import static org.elasticsearch.xpack.esql.core.type.DataType.DOUBLE; + +public class LogWithBaseInMap extends EsqlScalarFunction implements OptionalArgument { + public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( + Expression.class, + "LogWithBaseInMap", + LogWithBaseInMap::new + ); + + private final Expression number; + + private final Expression map; + + private static final String BASE = "base"; + + @FunctionInfo( + returnType = "double", + description = "Returns the logarithm of a value to a base. The input can be any numeric value, " + + "the return value is always a double.\n" + + "\n" + + "Logs of zero, negative numbers, and base of one return `null` as well as a warning." + ) + public LogWithBaseInMap( + Source source, + @Param( + name = "number", + type = { "double", "integer", "long" }, + description = "Numeric expression. If `null`, the function returns `null`." + ) Expression number, + @MapParam( + params = { @MapParam.MapParamEntry(name = "base", valueHint = { "2", "2.0" }) }, + description = "Input value. The input is a valid constant map expression.", + optional = true + ) Expression option + ) { + super(source, option == null ? 
Collections.singletonList(number) : List.of(number, option)); + this.number = number; + this.map = option; + } + + private LogWithBaseInMap(StreamInput in) throws IOException { + this( + Source.readFrom((PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readOptionalNamedWriteable(Expression.class) + ); + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + source().writeTo(out); + out.writeNamedWriteable(number); + out.writeOptionalNamedWriteable(map); + } + + @Override + public String getWriteableName() { + return ENTRY.name; + } + + @Override + protected TypeResolution resolveType() { + if (childrenResolved() == false) { + return new TypeResolution("Unresolved children"); + } + // validate field type + TypeResolution resolution = isNumeric(number, sourceText(), FIRST); + if (resolution.unresolved()) { + return resolution; + } + + if (map != null) { + // MapExpression does not have a DataType associated with it + resolution = isMapExpression(map, sourceText(), SECOND); + if (resolution.unresolved()) { + return resolution; + } + return validateOptions(); + } + return TypeResolution.TYPE_RESOLVED; + } + + @Override + public DataType dataType() { + return DOUBLE; + } + + @Override + public boolean foldable() { + return number.foldable(); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new LogWithBaseInMap(source(), newChildren.get(0), newChildren.size() > 1 ? 
newChildren.get(1) : null); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LogWithBaseInMap::new, number, map); + } + + @Override + public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { + var valueEval = Cast.cast(source(), number.dataType(), DataType.DOUBLE, toEvaluator.apply(number)); + double base = Math.E; + if (map instanceof MapExpression me) { + Expression b = me.get(BASE); + if (b != null && b.foldable()) { + Object v = b.fold(toEvaluator.foldCtx()); + if (v instanceof BytesRef br) { + v = br.utf8ToString(); + } + base = Double.parseDouble(v.toString()); + } + } + return new LogWithBaseInMapEvaluator.Factory(source(), valueEval, base); + } + + @Evaluator(warnExceptions = { ArithmeticException.class }) + static double process(double value, @Fixed double base) throws ArithmeticException { + if (base <= 0d || value <= 0d) { + throw new ArithmeticException("Log of non-positive number"); + } + if (base == 1d) { + throw new ArithmeticException("Log of base 1"); + } + return Math.log10(value) / Math.log10(base); + } + + public Expression number() { + return number; + } + + public Expression base() { + return map; + } + + private TypeResolution validateOptions() { + for (EntryExpression entry : ((MapExpression) map).entryExpressions()) { + Expression key = entry.key(); + Expression value = entry.value(); + TypeResolution resolution = isFoldable(key, sourceText(), SECOND).and(isFoldable(value, sourceText(), SECOND)); + if (resolution.unresolved()) { + return resolution; + } + Object k = key instanceof Literal l ? l.value() : null; + Object v = value instanceof Literal l ? 
l.value() : null; + if (k == null) { + return new TypeResolution( + format(null, "Invalid option key in [{}], expected a literal value but got [{}]", sourceText(), key.sourceText()) + ); + } + + if (v == null) { + return new TypeResolution( + format(null, "Invalid option value in [{}], expected a constant value but got [{}]", sourceText(), value.sourceText()) + ); + } + String base = k instanceof BytesRef br ? br.utf8ToString() : k.toString(); + String number = v instanceof BytesRef br ? br.utf8ToString() : v.toString(); + // validate the key is in SUPPORTED_OPTIONS + if (base.equals(BASE) == false) { + return new TypeResolution( + format(null, "Invalid option key in [{}], expected base but got [{}]", sourceText(), key.sourceText()) + ); + } + // validate the value is valid for the key provided + try { + Double.parseDouble(number); + } catch (NumberFormatException e) { + return new TypeResolution( + format(null, "Invalid option value in [{}], expected a numeric number but got [{}]", sourceText(), v) + ); + } + + } + return TypeResolution.TYPE_RESOLVED; + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java index b1baa6c55ce47..7c977fd1ce5a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/Round.java @@ -35,7 +35,7 @@ import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.FIRST; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.SECOND; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isNumeric; -import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isWholeNumber; +import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isType; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.bigIntegerToUnsignedLong; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.longToUnsignedLong; @@ -63,7 +63,7 @@ public Round( @Param( optional = true, name = "decimals", - type = { "integer" }, // TODO long is supported here too + type = { "integer", "long" }, description = "The number of decimal places to round to. Defaults to 0. If `null`, the function returns `null`." ) Expression decimals ) { @@ -103,7 +103,15 @@ protected TypeResolution resolveType() { return resolution; } - return decimals == null ? TypeResolution.TYPE_RESOLVED : isWholeNumber(decimals, sourceText(), SECOND); + return decimals == null + ? TypeResolution.TYPE_RESOLVED + : isType( + decimals, + dt -> dt.isWholeNumber() && dt != DataType.UNSIGNED_LONG, + sourceText(), + SECOND, + "whole number except unsigned_long or counter types" + ); } @Override @@ -123,11 +131,16 @@ static int process(int val, long decimals) { @Evaluator(extraName = "Long") static long process(long val, long decimals) { - return Maths.round(val, decimals).longValue(); + return Maths.round(val, decimals); } - @Evaluator(extraName = "UnsignedLong") + @Evaluator(extraName = "UnsignedLong", warnExceptions = ArithmeticException.class) static long processUnsignedLong(long val, long decimals) { + if (decimals <= -20) { + // Unsigned long max value is 2^64 - 1, which has 20 digits + return longToUnsignedLong(0, false); + } + Number ul = unsignedLongAsNumber(val); if (ul instanceof BigInteger bi) { BigInteger rounded = Maths.round(bi, decimals); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java index 
bcd6f4c30bf8a..7ec7d1b9b2eca 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppend.java @@ -55,6 +55,7 @@ public class MvAppend extends EsqlScalarFunction implements EvaluatorMapper { "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -62,6 +63,7 @@ public class MvAppend extends EsqlScalarFunction implements EvaluatorMapper { "ip", "keyword", "long", + "unsigned_long", "version" }, description = "Concatenates values of two multi-value fields." ) @@ -74,6 +76,7 @@ public MvAppend( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -82,6 +85,7 @@ public MvAppend( "keyword", "long", "text", + "unsigned_long", "version" } ) Expression field1, @Param( @@ -91,6 +95,7 @@ public MvAppend( "cartesian_point", "cartesian_shape", "date", + "date_nanos", "double", "geo_point", "geo_shape", @@ -99,6 +104,7 @@ public MvAppend( "keyword", "long", "text", + "unsigned_long", "version" } ) Expression field2 ) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java index 9a2b041fafeb6..4d85d3a6a8d2e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupe.java @@ -46,6 +46,7 @@ public class MvDedupe extends AbstractMultivalueFunction { "ip", "keyword", "long", + "unsigned_long", "version" }, description = "Remove duplicate values from a multivalued field.", note = "`MV_DEDUPE` may, but won't always, sort the values in the column.", @@ 
-69,6 +70,7 @@ public MvDedupe( "keyword", "long", "text", + "unsigned_long", "version" }, description = "Multivalue expression." ) Expression field diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java index 4a04524d1b23d..3bffb7853d3b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSlice.java @@ -67,6 +67,7 @@ public class MvSlice extends EsqlScalarFunction implements OptionalArgument, Eva "ip", "keyword", "long", + "unsigned_long", "version" }, description = """ Returns a subset of the multivalued field using the start and end index values. @@ -96,6 +97,7 @@ public MvSlice( "keyword", "long", "text", + "unsigned_long", "version" }, description = "Multivalue expression. If `null`, the function returns `null`." 
) Expression field, diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java index 4d08b0e9687ec..25c0607155afd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/BinarySpatialFunction.java @@ -13,12 +13,14 @@ import org.elasticsearch.geometry.Geometry; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.expression.EsqlTypeResolutions; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; import java.io.IOException; import java.util.Objects; @@ -260,4 +262,20 @@ protected Geometry fromBytesRef(BytesRef bytesRef) { protected abstract T compare(BytesRef left, BytesRef right) throws IOException; } + + /** + * Push-down to Lucene is only possible if one field is an indexed spatial field, and the other is a constant spatial or string column. 
+ */ + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + // The use of foldable here instead of SpatialEvaluatorFieldKey.isConstant is intentional to match the behavior of the + // Lucene pushdown code in EsqlTranslationHandler::SpatialRelatesTranslator + // We could enhance both places to support ReferenceAttributes that refer to constants, but that is a larger change + return isPushableSpatialAttribute(left(), pushdownPredicates) && right().foldable() + || isPushableSpatialAttribute(right(), pushdownPredicates) && left().foldable(); + + } + + private static boolean isPushableSpatialAttribute(Expression exp, LucenePushdownPredicates p) { + return exp instanceof FieldAttribute fa && DataType.isSpatial(fa.dataType()) && fa.getExactInfo().hasExact() && p.isIndexed(fa); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java index 8ca89334b059b..295116a5e99c2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/spatial/SpatialRelatesFunction.java @@ -17,15 +17,26 @@ import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.geometry.Geometry; import org.elasticsearch.index.mapper.ShapeIndexer; import org.elasticsearch.lucene.spatial.Component2DVisitor; import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.lucene.spatial.GeometryDocValueReader; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import 
org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; import java.io.IOException; import java.util.Map; @@ -36,7 +47,8 @@ public abstract class SpatialRelatesFunction extends BinarySpatialFunction implements EvaluatorMapper, - SpatialEvaluatorFactory.SpatialSourceSupplier { + SpatialEvaluatorFactory.SpatialSourceSupplier, + TranslationAware { protected SpatialRelatesFunction(Source source, Expression left, Expression right, boolean leftDocValues, boolean rightDocValues) { super(source, left, right, leftDocValues, rightDocValues, false); @@ -164,4 +176,44 @@ protected void processPointDocValuesAndSource( } } } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return super.translatable(pushdownPredicates); // only for the explicit Override, as only this subclass implements TranslationAware + } + + @Override + public Query asQuery(TranslatorHandler handler) { + if (left().foldable()) { + checkSpatialRelatesFunction(left(), queryRelation()); + return translate(handler, right(), left()); + } else { + checkSpatialRelatesFunction(right(), queryRelation()); + return translate(handler, left(), right()); + } + + } + + private static void 
checkSpatialRelatesFunction(Expression constantExpression, ShapeRelation queryRelation) { + Check.isTrue( + constantExpression.foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [ST_{}]", + constantExpression.sourceLocation().getLineNumber(), + constantExpression.sourceLocation().getColumnNumber(), + Expressions.name(constantExpression), + queryRelation + ); + } + + private Query translate(TranslatorHandler handler, Expression spatialExpression, Expression constantExpression) { + TypedAttribute attribute = LucenePushdownPredicates.checkIsPushableAttribute(spatialExpression); + String name = handler.nameOf(attribute); + + try { + Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(FoldContext.small() /* TODO remove me */, constantExpression); + return new SpatialRelatesQuery(source(), name, queryRelation(), shape, attribute.dataType()); + } catch (IllegalArgumentException e) { + throw new QlIllegalArgumentException(e.getMessage(), e); + } + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java index fb0aac0c85b38..688cbbb992443 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLike.java @@ -11,9 +11,12 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; +import 
org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.RegexQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -21,13 +24,18 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -public class RLike extends org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike implements EvaluatorMapper { +public class RLike extends org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLike + implements + EvaluatorMapper, + TranslationAware.SingleValueTranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "RLike", RLike::new); @FunctionInfo(returnType = "boolean", description = """ @@ -103,4 +111,21 @@ public Boolean fold(FoldContext ctx) { public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvaluator) { return AutomataMatch.toEvaluator(source(), toEvaluator.apply(field()), pattern().createAutomaton()); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableFieldAttribute(field()); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + var fa = LucenePushdownPredicates.checkIsFieldAttribute(field()); + // TODO: see whether escaping is needed + return new RegexQuery(source(), 
handler.nameOf(fa.exactAttribute()), pattern().asJavaRegex(), caseInsensitive()); + } + + @Override + public Expression singleValueField() { + return field(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java index 65455c708cc9b..8c596ee032bee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLike.java @@ -12,9 +12,13 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.WildcardQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; @@ -22,13 +26,18 @@ import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import static 
org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isString; -public class WildcardLike extends org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike implements EvaluatorMapper { +public class WildcardLike extends org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike + implements + EvaluatorMapper, + TranslationAware.SingleValueTranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "WildcardLike", @@ -114,4 +123,26 @@ public EvalOperator.ExpressionEvaluator.Factory toEvaluator(ToEvaluator toEvalua pattern().pattern().length() == 0 ? Automata.makeEmptyString() : pattern().createAutomaton() ); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableAttribute(field()); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + var field = field(); + LucenePushdownPredicates.checkIsPushableAttribute(field); + return translateField(handler.nameOf(field instanceof FieldAttribute fa ? 
fa.exactAttribute() : field)); + } + + // TODO: see whether escaping is needed + private Query translateField(String targetFieldName) { + return new WildcardQuery(source(), targetFieldName, pattern().asLuceneWildcard(), caseInsensitive()); + } + + @Override + public Expression singleValueField() { + return field(); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java similarity index 96% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java index 32f7e181933b4..64fd63a844957 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Predicates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Predicates.java @@ -4,13 +4,13 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate; +package org.elasticsearch.xpack.esql.expression.predicate; import org.elasticsearch.core.Tuple; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import java.util.ArrayList; import java.util.List; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Range.java similarity index 63% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Range.java index a4e4685f764e8..4da10c5ec7b8c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/Range.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/Range.java @@ -4,17 +4,24 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate; +package org.elasticsearch.xpack.esql.expression.predicate; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.RangeQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; import java.time.DateTimeException; @@ -23,10 +30,19 @@ import java.util.Objects; import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; +import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; import static org.elasticsearch.xpack.esql.core.util.DateUtils.asDateTime; +import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static 
org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; // BETWEEN or range - is a mix of gt(e) AND lt(e) -public class Range extends ScalarFunction { +public class Range extends ScalarFunction implements TranslationAware.SingleValueTranslationAware { private final Expression value, lower, upper; private final boolean includeLower, includeUpper; @@ -177,4 +193,62 @@ public boolean equals(Object obj) { && Objects.equals(upper, other.upper) && Objects.equals(zoneId, other.zoneId); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableAttribute(value) && lower.foldable() && upper.foldable(); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return translate(handler); + } + + private RangeQuery translate(TranslatorHandler handler) { + Object l = valueOf(FoldContext.small() /* TODO remove me */, lower); + Object u = valueOf(FoldContext.small() /* TODO remove me */, upper); + String format = null; + + DataType dataType = value.dataType(); + if (DataType.isDateTime(dataType) && DataType.isDateTime(lower.dataType()) && DataType.isDateTime(upper.dataType())) { + l = dateTimeToString((Long) l); + u = dateTimeToString((Long) u); + format = DEFAULT_DATE_TIME_FORMATTER.pattern(); + } + + if (dataType == IP) { + if (l instanceof BytesRef bytesRef) { + l = ipToString(bytesRef); + } + if (u instanceof BytesRef bytesRef) { + u = ipToString(bytesRef); + } + } else if (dataType == VERSION) { + // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not + // encoded) representation of the version as it'll do the encoding itself. 
+ if (l instanceof BytesRef bytesRef) { + l = versionToString(bytesRef); + } else if (l instanceof Version version) { + l = versionToString(version); + } + if (u instanceof BytesRef bytesRef) { + u = versionToString(bytesRef); + } else if (u instanceof Version version) { + u = versionToString(version); + } + } else if (dataType == UNSIGNED_LONG) { + if (l instanceof Long ul) { + l = unsignedLongAsNumber(ul); + } + if (u instanceof Long ul) { + u = unsignedLongAsNumber(ul); + } + } + return new RangeQuery(source(), handler.nameOf(value), l, includeLower(), u, includeUpper(), format, zoneId); + } + + @Override + public Expression singleValueField() { + return value; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MultiMatchQueryPredicate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MultiMatchQueryPredicate.java index 5d165d9ea01f7..eb6c04be58218 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MultiMatchQueryPredicate.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/fulltext/MultiMatchQueryPredicate.java @@ -9,9 +9,14 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.esql.querydsl.query.MultiMatchQuery; import java.io.IOException; import 
java.util.List; @@ -20,7 +25,7 @@ import static java.util.Collections.emptyList; -public class MultiMatchQueryPredicate extends FullTextPredicate { +public class MultiMatchQueryPredicate extends FullTextPredicate implements TranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, @@ -88,4 +93,14 @@ public boolean equals(Object obj) { public String getWriteableName() { return ENTRY.name; } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return true; // needs update if we'll ever validate the fields + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return new MultiMatchQuery(source(), query(), fields(), this); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/And.java similarity index 92% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/And.java index d2b801a012d0c..8039ad2cf3702 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/And.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/And.java @@ -4,15 +4,15 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; +package org.elasticsearch.xpack.esql.expression.predicate.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogic.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogic.java new file mode 100644 index 0000000000000..7adfc668bae15 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogic.java @@ -0,0 +1,111 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ +package org.elasticsearch.xpack.esql.expression.predicate.logical; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Nullability; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal; +import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; +import org.elasticsearch.xpack.esql.core.querydsl.query.BoolQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.core.util.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; + +import java.io.IOException; +import java.util.Arrays; +import java.util.List; + +import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isBoolean; + +public abstract class BinaryLogic extends BinaryOperator implements TranslationAware { + + protected BinaryLogic(Source source, Expression left, Expression right, BinaryLogicOperation operation) { + super(source, left, right, operation); + } + + protected BinaryLogic(StreamInput in, BinaryLogicOperation op) throws IOException { + this( + Source.readFrom((StreamInput & PlanStreamInput) in), + in.readNamedWriteable(Expression.class), + in.readNamedWriteable(Expression.class), + op + ); + } + + @Override + public final void writeTo(StreamOutput out) throws IOException { + Source.EMPTY.writeTo(out); + out.writeNamedWriteable(left()); + out.writeNamedWriteable(right()); + } + + @Override + 
public DataType dataType() { + return DataType.BOOLEAN; + } + + @Override + protected TypeResolution resolveInputType(Expression e, ParamOrdinal paramOrdinal) { + return isBoolean(e, sourceText(), paramOrdinal); + } + + @Override + public Nullability nullable() { + // Cannot fold null due to 3vl, constant folding will do any possible folding. + return Nullability.UNKNOWN; + } + + @Override + protected boolean isCommutative() { + return true; + } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return left() instanceof TranslationAware leftAware + && leftAware.translatable(pushdownPredicates) + && right() instanceof TranslationAware rightAware + && rightAware.translatable(pushdownPredicates); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return boolQuery(source(), handler.asQuery(left()), handler.asQuery(right()), this instanceof And); + } + + public static Query boolQuery(Source source, Query left, Query right, boolean isAnd) { + Check.isTrue(left != null || right != null, "Both expressions are null"); + if (left == null) { + return right; + } + if (right == null) { + return left; + } + List queries; + // check if either side is already a bool query to an extra bool query + if (left instanceof BoolQuery leftBool && leftBool.isAnd() == isAnd) { + if (right instanceof BoolQuery rightBool && rightBool.isAnd() == isAnd) { + queries = CollectionUtils.combine(leftBool.queries(), rightBool.queries()); + } else { + queries = CollectionUtils.combine(leftBool.queries(), right); + } + } else if (right instanceof BoolQuery bool && bool.isAnd() == isAnd) { + queries = CollectionUtils.combine(bool.queries(), left); + } else { + queries = Arrays.asList(left, right); + } + return new BoolQuery(source, isAnd, queries); + } +} diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperation.java similarity index 95% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperation.java index 8b8224334654a..5a5fad32327fb 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperation.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; +package org.elasticsearch.xpack.esql.expression.predicate.logical; import org.elasticsearch.xpack.esql.core.expression.predicate.PredicateBiFunction; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Not.java similarity index 80% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Not.java index 218f61856accc..8d11916a24038 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Not.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Not.java @@ -4,25 +4,29 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; +package org.elasticsearch.xpack.esql.expression.predicate.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.isBoolean; -public class Not extends UnaryScalarFunction implements Negatable { +public class Not extends UnaryScalarFunction implements Negatable, TranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "Not", Not::new); public Not(Source source, Expression child) { @@ -94,4 +98,14 @@ public DataType dataType() { static Expression negate(Expression exp) { return exp instanceof Negatable ? 
((Negatable) exp).negate() : new Not(exp.source(), exp); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return field() instanceof TranslationAware aware && aware.translatable(pushdownPredicates); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return handler.asQuery(field()).negate(source()); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Or.java similarity index 92% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Or.java index bf7a16aec8df9..b9db3e3b94507 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/Or.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/logical/Or.java @@ -4,15 +4,15 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; +package org.elasticsearch.xpack.esql.expression.predicate.logical; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; import java.io.IOException; diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java similarity index 73% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java index f5542ff7c3de5..93f7df023d89c 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNotNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNotNull.java @@ -4,22 +4,27 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate.nulls; +package org.elasticsearch.xpack.esql.expression.predicate.nulls; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; +import org.elasticsearch.xpack.esql.core.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; -public class IsNotNull extends UnaryScalarFunction implements Negatable { +public class IsNotNull extends UnaryScalarFunction implements Negatable, TranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, "IsNotNull", @@ -68,4 +73,14 @@ public DataType dataType() { public UnaryScalarFunction negate() { return new IsNull(source(), field()); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return IsNull.isTranslatable(field(), pushdownPredicates); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return new ExistsQuery(source(), handler.nameOf(field())); + } } diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java similarity index 65% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java index bb85791a9f85e..bbd47e27e652f 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/expression/predicate/nulls/IsNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/nulls/IsNull.java @@ -4,22 +4,28 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.esql.core.expression.predicate.nulls; +package org.elasticsearch.xpack.esql.expression.predicate.nulls; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.Negatable; +import org.elasticsearch.xpack.esql.core.querydsl.query.ExistsQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; -public class IsNull extends 
UnaryScalarFunction implements Negatable { +public class IsNull extends UnaryScalarFunction implements Negatable, TranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "IsNull", IsNull::new); public IsNull(Source source, Expression field) { @@ -64,4 +70,18 @@ public DataType dataType() { public UnaryScalarFunction negate() { return new IsNotNull(source(), field()); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return isTranslatable(field(), pushdownPredicates); + } + + protected static boolean isTranslatable(Expression field, LucenePushdownPredicates pushdownPredicates) { + return LucenePushdownPredicates.isPushableTextFieldAttribute(field) || pushdownPredicates.isPushableFieldAttribute(field); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return new NotQuery(source(), new ExistsQuery(source(), handler.nameOf(field()))); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java index e56c19b26a902..fc69f4dc19d72 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/EsqlBinaryComparison.java @@ -7,31 +7,64 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; +import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.compute.operator.EvalOperator; import 
org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.core.querydsl.query.MatchAll; +import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.RangeQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.evaluator.mapper.EvaluatorMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.EsqlArithmeticOperation; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; +import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; +import java.math.BigDecimal; +import java.math.BigInteger; +import java.time.OffsetTime; import java.time.ZoneId; +import java.time.ZonedDateTime; +import java.util.List; import java.util.Map; import static 
org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; +import static org.elasticsearch.xpack.esql.core.type.DataType.IP; import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; +import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; +import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.HOUR_MINUTE_SECOND; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.commonType; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; +import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; -public abstract class EsqlBinaryComparison extends BinaryComparison implements EvaluatorMapper { +public abstract class EsqlBinaryComparison extends BinaryComparison + implements + EvaluatorMapper, + TranslationAware.SingleValueTranslationAware { private final Map evaluatorMap; @@ -287,4 +320,224 @@ public static String formatIncompatibleTypesMessage(DataType leftType, DataType ); } + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + if (right().foldable()) { + if (pushdownPredicates.isPushableFieldAttribute(left())) { + return true; + } + if (LucenePushdownPredicates.isPushableMetadataAttribute(left())) { + return this instanceof Equals || this instanceof NotEquals; + } + } + return false; + } + + /** + * This method is responsible for pushing the ES|QL Binary Comparison operators into Lucene. It covers: + *
      + *
    • {@link Equals}
    • + *
    • {@link NotEquals}
    • + *
    • {@link GreaterThanOrEqual}
    • + *
    • {@link GreaterThan}
    • + *
    • {@link LessThanOrEqual}
    • + *
    • {@link LessThan}
    • + *
    + * + * In general, we are able to push these down when one of the arguments is a constant (i.e. is foldable). This class assumes + * that an earlier pass through the query has rearranged things so that the foldable value will be the right hand side + * input to the operation. + */ + @Override + public Query asQuery(TranslatorHandler handler) { + Check.isTrue( + right().foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", + right().sourceLocation().getLineNumber(), + right().sourceLocation().getColumnNumber(), + Expressions.name(right()), + symbol() + ); + + Query translated = translateOutOfRangeComparisons(); + return translated != null ? translated : translate(handler); + } + + @Override + public Expression singleValueField() { + return left(); + } + + private Query translate(TranslatorHandler handler) { + TypedAttribute attribute = LucenePushdownPredicates.checkIsPushableAttribute(left()); + String name = handler.nameOf(attribute); + Object value = valueOf(FoldContext.small() /* TODO remove me */, right()); + String format = null; + boolean isDateLiteralComparison = false; + + // TODO: This type coersion layer is copied directly from the QL counterpart code. It's probably not necessary or desireable + // in the ESQL version. We should instead do the type conversions using our casting functions. 
+ // for a date constant comparison, we need to use a format for the date, to make sure that the format is the same + // no matter the timezone provided by the user + if (value instanceof ZonedDateTime || value instanceof OffsetTime) { + DateFormatter formatter; + if (value instanceof ZonedDateTime) { + formatter = DEFAULT_DATE_TIME_FORMATTER; + // RangeQueryBuilder accepts an Object as its parameter, but it will call .toString() on the ZonedDateTime instance + // which can have a slightly different format depending on the ZoneId used to create the ZonedDateTime + // Since RangeQueryBuilder can handle date as String as well, we'll format it as String and provide the format as well. + value = formatter.format((ZonedDateTime) value); + } else { + formatter = HOUR_MINUTE_SECOND; + value = formatter.format((OffsetTime) value); + } + format = formatter.pattern(); + isDateLiteralComparison = true; + } else if (attribute.dataType() == IP && value instanceof BytesRef bytesRef) { + value = ipToString(bytesRef); + } else if (attribute.dataType() == VERSION) { + // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not + // encoded) representation of the version as it'll do the encoding itself. 
+ if (value instanceof BytesRef bytesRef) { + value = versionToString(bytesRef); + } else if (value instanceof Version version) { + value = versionToString(version); + } + } else if (attribute.dataType() == UNSIGNED_LONG && value instanceof Long ul) { + value = unsignedLongAsNumber(ul); + } + + ZoneId zoneId = null; + if (DataType.isDateTime(attribute.dataType())) { + zoneId = zoneId(); + value = dateTimeToString((Long) value); + format = DEFAULT_DATE_TIME_FORMATTER.pattern(); + } + if (this instanceof GreaterThan) { + return new RangeQuery(source(), name, value, false, null, false, format, zoneId); + } + if (this instanceof GreaterThanOrEqual) { + return new RangeQuery(source(), name, value, true, null, false, format, zoneId); + } + if (this instanceof LessThan) { + return new RangeQuery(source(), name, null, false, value, false, format, zoneId); + } + if (this instanceof LessThanOrEqual) { + return new RangeQuery(source(), name, null, false, value, true, format, zoneId); + } + if (this instanceof Equals || this instanceof NotEquals) { + name = LucenePushdownPredicates.pushableAttributeName(attribute); + + Query query; + if (isDateLiteralComparison) { + // dates equality uses a range query because it's the one that has a "format" parameter + query = new RangeQuery(source(), name, value, true, value, true, format, zoneId); + } else { + query = new TermQuery(source(), name, value); + } + if (this instanceof NotEquals) { + query = new NotQuery(source(), query); + } + return query; + } + + throw new QlIllegalArgumentException( + "Don't know how to translate binary comparison [{}] in [{}]", + right().nodeString(), + toString() + ); + } + + private Query translateOutOfRangeComparisons() { + if ((left() instanceof FieldAttribute) == false || left().dataType().isNumeric() == false) { + return null; + } + Object value = valueOf(FoldContext.small() /* TODO remove me */, right()); + + // Comparisons with multi-values always return null in ESQL. 
+ if (value instanceof List) { + return new MatchAll(source()).negate(source()); + } + + DataType valueType = right().dataType(); + DataType attributeDataType = left().dataType(); + if (valueType == UNSIGNED_LONG && value instanceof Long ul) { + value = unsignedLongAsNumber(ul); + } + Number num = (Number) value; + if (isInRange(attributeDataType, valueType, num)) { + return null; + } + + if (Double.isNaN(((Number) value).doubleValue())) { + return new MatchAll(source()).negate(source()); + } + + boolean matchAllOrNone; + if (this instanceof GreaterThan || this instanceof GreaterThanOrEqual) { + matchAllOrNone = (num.doubleValue() > 0) == false; + } else if (this instanceof LessThan || this instanceof LessThanOrEqual) { + matchAllOrNone = (num.doubleValue() > 0); + } else if (this instanceof Equals) { + matchAllOrNone = false; + } else if (this instanceof NotEquals) { + matchAllOrNone = true; + } else { + throw new QlIllegalArgumentException("Unknown binary comparison [{}]", toString()); + } + + return matchAllOrNone ? new MatchAll(source()) : new MatchAll(source()).negate(source()); + } + + private static final BigDecimal HALF_FLOAT_MAX = BigDecimal.valueOf(65504); + private static final BigDecimal UNSIGNED_LONG_MAX = BigDecimal.valueOf(2).pow(64).subtract(BigDecimal.ONE); + + private static boolean isInRange(DataType numericFieldDataType, DataType valueDataType, Number value) { + double doubleValue = value.doubleValue(); + if (Double.isNaN(doubleValue) || Double.isInfinite(doubleValue)) { + return false; + } + + BigDecimal decimalValue; + if (value instanceof BigInteger bigIntValue) { + // Unsigned longs may be represented as BigInteger. + decimalValue = new BigDecimal(bigIntValue); + } else { + decimalValue = valueDataType.isRationalNumber() ? BigDecimal.valueOf(doubleValue) : BigDecimal.valueOf(value.longValue()); + } + + // Determine min/max for dataType. Use BigDecimals as doubles will have rounding errors for long/ulong. 
+ BigDecimal minValue; + BigDecimal maxValue; + if (numericFieldDataType == DataType.BYTE) { + minValue = BigDecimal.valueOf(Byte.MIN_VALUE); + maxValue = BigDecimal.valueOf(Byte.MAX_VALUE); + } else if (numericFieldDataType == DataType.SHORT) { + minValue = BigDecimal.valueOf(Short.MIN_VALUE); + maxValue = BigDecimal.valueOf(Short.MAX_VALUE); + } else if (numericFieldDataType == DataType.INTEGER) { + minValue = BigDecimal.valueOf(Integer.MIN_VALUE); + maxValue = BigDecimal.valueOf(Integer.MAX_VALUE); + } else if (numericFieldDataType == DataType.LONG) { + minValue = BigDecimal.valueOf(Long.MIN_VALUE); + maxValue = BigDecimal.valueOf(Long.MAX_VALUE); + } else if (numericFieldDataType == DataType.UNSIGNED_LONG) { + minValue = BigDecimal.ZERO; + maxValue = UNSIGNED_LONG_MAX; + } else if (numericFieldDataType == DataType.HALF_FLOAT) { + minValue = HALF_FLOAT_MAX.negate(); + maxValue = HALF_FLOAT_MAX; + } else if (numericFieldDataType == DataType.FLOAT) { + minValue = BigDecimal.valueOf(-Float.MAX_VALUE); + maxValue = BigDecimal.valueOf(Float.MAX_VALUE); + } else if (numericFieldDataType == DataType.DOUBLE || numericFieldDataType == DataType.SCALED_FLOAT) { + // Scaled floats are represented as doubles in ESQL. 
+ minValue = BigDecimal.valueOf(-Double.MAX_VALUE); + maxValue = BigDecimal.valueOf(Double.MAX_VALUE); + } else { + throw new QlIllegalArgumentException("Data type [{}] unsupported for numeric range check", numericFieldDataType); + } + + return minValue.compareTo(decimalValue) <= 0 && maxValue.compareTo(decimalValue) >= 0; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java index ff90f472bae30..d5969d3a0ab37 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/In.java @@ -16,10 +16,15 @@ import org.elasticsearch.compute.data.Vector; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.Comparisons; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -30,15 +35,22 @@ import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.esql.expression.function.scalar.EsqlScalarFunction; import 
org.elasticsearch.xpack.esql.expression.function.scalar.math.Cast; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.esql.io.stream.PlanStreamInput; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; import java.io.IOException; +import java.util.ArrayList; import java.util.BitSet; import java.util.Collections; +import java.util.LinkedHashSet; import java.util.List; +import java.util.Set; import static org.elasticsearch.common.logging.LoggerMessageFormat.format; +import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; import static org.elasticsearch.xpack.esql.core.expression.TypeResolutions.ParamOrdinal.DEFAULT; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.type.DataType.DATETIME; @@ -101,7 +113,7 @@ * String Template generators that we use for things like {@link Block} and {@link Vector}. *

    */ -public class In extends EsqlScalarFunction { +public class In extends EsqlScalarFunction implements TranslationAware.SingleValueTranslationAware { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry(Expression.class, "In", In::new); private final Expression value; @@ -444,4 +456,60 @@ static boolean process(BitSet nulls, BitSet mvs, BytesRef lhs, BytesRef[] rhs) { } return false; } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableAttribute(value) && Expressions.foldable(list()); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + return translate(handler); + } + + private Query translate(TranslatorHandler handler) { + TypedAttribute attribute = LucenePushdownPredicates.checkIsPushableAttribute(value()); + + Set terms = new LinkedHashSet<>(); + List queries = new ArrayList<>(); + + for (Expression rhs : list()) { + if (DataType.isNull(rhs.dataType()) == false) { + if (needsTypeSpecificValueHandling(attribute.dataType())) { + // delegates to BinaryComparisons translator to ensure consistent handling of date and time values + // TODO: + // Query query = BinaryComparisons.translate(new Equals(in.source(), in.value(), rhs), handler); + Query query = handler.asQuery(new Equals(source(), value(), rhs)); + + if (query instanceof TermQuery) { + terms.add(((TermQuery) query).value()); + } else { + queries.add(query); + } + } else { + terms.add(valueOf(FoldContext.small() /* TODO remove me */, rhs)); + } + } + } + + if (terms.isEmpty() == false) { + String fieldName = LucenePushdownPredicates.pushableAttributeName(attribute); + queries.add(new TermsQuery(source(), fieldName, terms)); + } + + return queries.stream().reduce((q1, q2) -> or(source(), q1, q2)).get(); + } + + private static boolean needsTypeSpecificValueHandling(DataType fieldType) { + return DataType.isDateTime(fieldType) || fieldType == IP || fieldType == VERSION || 
fieldType == UNSIGNED_LONG; + } + + private static Query or(Source source, Query left, Query right) { + return BinaryLogic.boolQuery(source, left, right, false); + } + + @Override + public Expression singleValueField() { + return value; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java index 1ce87094e50b3..83e6dd6bdffb2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveBinaryComparison.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.esql.expression.predicate.operator.comparison; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.BinaryScalarFunction; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -14,7 +15,7 @@ import java.io.IOException; -public abstract class InsensitiveBinaryComparison extends BinaryScalarFunction { +public abstract class InsensitiveBinaryComparison extends BinaryScalarFunction implements TranslationAware.SingleValueTranslationAware { protected InsensitiveBinaryComparison(Source source, Expression left, Expression right) { super(source, left, right); @@ -28,5 +29,4 @@ protected InsensitiveBinaryComparison(StreamInput in) throws IOException { public DataType dataType() { return DataType.BOOLEAN; } - } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java index 01564644bf5c7..3a9ca27637db8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/predicate/operator/comparison/InsensitiveEquals.java @@ -16,13 +16,22 @@ import org.elasticsearch.compute.ann.Evaluator; import org.elasticsearch.compute.ann.Fixed; import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.util.Check; +import org.elasticsearch.xpack.esql.optimizer.rules.physical.local.LucenePushdownPredicates; +import org.elasticsearch.xpack.esql.planner.TranslatorHandler; import java.io.IOException; +import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; + public class InsensitiveEquals extends InsensitiveBinaryComparison { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( Expression.class, @@ -86,4 +95,38 @@ public Boolean fold(FoldContext ctx) { } return process(leftVal, rightVal); } + + @Override + public boolean translatable(LucenePushdownPredicates pushdownPredicates) { + return pushdownPredicates.isPushableFieldAttribute(left()) && right().foldable(); + } + + @Override + public Query asQuery(TranslatorHandler handler) { + checkInsensitiveComparison(); + 
return translate(); + } + + private void checkInsensitiveComparison() { + Check.isTrue( + right().foldable(), + "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", + right().sourceLocation().getLineNumber(), + right().sourceLocation().getColumnNumber(), + Expressions.name(right()), + symbol() + ); + } + + private Query translate() { + TypedAttribute attribute = LucenePushdownPredicates.checkIsPushableAttribute(left()); + BytesRef value = BytesRefs.toBytesRef(valueOf(FoldContext.small() /* TODO remove me */, right())); + String name = LucenePushdownPredicates.pushableAttributeName(attribute); + return new TermQuery(source(), name, value.utf8ToString(), true); + } + + @Override + public Expression singleValueField() { + return left(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java index 5f463f2aa4c78..d3825361f7de6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsElimination.java @@ -9,8 +9,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java index e1803872fd606..3be1889ab3c4d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplification.java @@ -11,23 +11,23 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryPredicate; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.List; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; import static org.elasticsearch.xpack.esql.core.expression.Literal.TRUE; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineAnd; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineOr; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.inCommon; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitAnd; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitOr; -import static 
org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.subtract; import static org.elasticsearch.xpack.esql.core.util.CollectionUtils.combine; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.combineAnd; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.inCommon; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.splitAnd; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.splitOr; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.subtract; public final class BooleanSimplification extends OptimizerRules.OptimizerExpressionRule { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java index 1c290a7c4c4fd..e2b5df3a82783 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisons.java @@ -9,12 +9,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import 
org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java index e1cda9cb149d4..b570d5c3bc5f4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctions.java @@ -10,10 +10,10 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; @@ -27,8 +27,8 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.combineOr; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitOr; 
+import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.combineOr; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.splitOr; import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java index f00a8103f913e..29280b1af8ad5 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ExtractAggregateCommonFilter.java @@ -18,7 +18,7 @@ import java.util.ArrayList; import java.util.List; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.extractCommon; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.extractCommon; /** * Extract a per-function expression filter applied to all the aggs as a query {@link Filter}, when no groups are provided. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java index 5a1677f2759e3..48542a94d1355 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEquals.java @@ -9,14 +9,14 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.Range; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java index e3165180e331c..1c9ce3541f81b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullable.java @@ -10,11 +10,11 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java index 1c7e765c6bd59..512a7253b813f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneColumns.java @@ -102,15 +102,13 @@ public LogicalPlan apply(LogicalPlan plan) { p = new Eval(eval.source(), eval.child(), remaining); } } - } else if (p instanceof EsRelation 
esRelation && esRelation.indexMode() == IndexMode.LOOKUP) { + } else if (p instanceof EsRelation esr && esr.indexMode() == IndexMode.LOOKUP) { // Normally, pruning EsRelation has no effect because InsertFieldExtraction only extracts the required fields, anyway. - // The field extraction for LOOKUP JOIN works differently, however - we extract all fields (other than the join key) - // that the EsRelation has. - var remaining = removeUnused(esRelation.output(), used); - // TODO: LookupFromIndexOperator cannot handle 0 lookup fields, yet. That means 1 field in total (key field + lookup). - // https://github.com/elastic/elasticsearch/issues/118778 - if (remaining != null && remaining.size() > 1) { - p = new EsRelation(esRelation.source(), esRelation.index(), remaining, esRelation.indexMode(), esRelation.frozen()); + // However, InsertFieldExtraction can't be currently used in LOOKUP JOIN right index, + // it works differently as we extract all fields (other than the join key) that the EsRelation has. 
+ var remaining = removeUnused(esr.output(), used); + if (remaining != null) { + p = new EsRelation(esr.source(), esr.indexPattern(), esr.indexMode(), esr.indexNameWithModes(), remaining); } } } while (recheck); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneFilters.java index 00698d009ea23..88c3b46549d42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneFilters.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PruneFilters.java @@ -10,10 +10,10 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.plan.logical.Filter; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java index 9ec902e729f54..a5f7ea326eb34 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFilters.java @@ -14,8 +14,8 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.plan.logical.Enrich; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Filter; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java index 7953b2b28eaaa..6d92fd40584e7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatch.java @@ -9,10 +9,10 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.StringPattern; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; import org.elasticsearch.xpack.esql.parser.ParsingException; diff --git 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java index 053441bce5e1f..921db7f7ad23e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceStringCasingWithInsensitiveEquals.java @@ -11,10 +11,10 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ChangeCase; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnEmptyMappings.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnEmptyMappings.java index a8672b64c8b98..d57a3de21b4a6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnEmptyMappings.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SkipQueryOnEmptyMappings.java @@ -16,6 +16,6 @@ public final class SkipQueryOnEmptyMappings extends OptimizerRules.OptimizerRule @Override protected LogicalPlan rule(EsRelation plan) { - return plan.index().concreteIndices().isEmpty() ? new LocalRelation(plan.source(), plan.output(), LocalSupplier.EMPTY) : plan; + return plan.concreteIndices().isEmpty() ? new LocalRelation(plan.source(), plan.output(), LocalSupplier.EMPTY) : plan; } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java index 870464feb4867..534f118617d50 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/SplitInWithFoldableValue.java @@ -9,7 +9,7 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.optimizer.LogicalOptimizerContext; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateMetricsAggregate.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateMetricsAggregate.java index 2879db5042f4a..5f34899875efd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateMetricsAggregate.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/TranslateMetricsAggregate.java @@ -220,7 +220,7 @@ private static Aggregate toStandardAggregate(Aggregate metrics) { if (attributes.stream().noneMatch(a -> a.name().equals(MetadataAttribute.TIMESTAMP_FIELD))) { attributes.removeIf(a -> a.name().equals(MetadataAttribute.TIMESTAMP_FIELD)); } - return new EsRelation(r.source(), r.index(), new ArrayList<>(attributes), IndexMode.STANDARD); + return new EsRelation(r.source(), r.indexPattern(), IndexMode.STANDARD, r.indexNameWithModes(), new ArrayList<>(attributes)); }); return new Aggregate(metrics.source(), child, Aggregate.AggregateType.STANDARD, metrics.groupings(), metrics.aggregates()); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java index d161071fe2839..f27be368f12b4 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferIsNotNull.java @@ -11,11 +11,11 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import 
org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.esql.rule.Rule; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferNonNullAggConstraint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferNonNullAggConstraint.java index 35bb4e1dc082f..bb818eb987021 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferNonNullAggConstraint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/local/InferNonNullAggConstraint.java @@ -11,9 +11,9 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.function.aggregate.AggregateFunction; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.rules.logical.OptimizerRules; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java index afeab28745c65..13f18f4b2bf4d 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/EnableSpatialDistancePushdown.java @@ -20,14 +20,14 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.NameId; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialDisjoint; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialIntersects; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules; @@ -42,8 +42,7 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitAnd; -import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushSpatialFunctionToSource; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.splitAnd; import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushToSource; import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.getAliasReplacedBy; @@ -184,7 +183,7 @@ private PhysicalPlan 
rewriteBySplittingFilter( private Map getPushableDistances(List aliases, LucenePushdownPredicates lucenePushdownPredicates) { Map distances = new LinkedHashMap<>(); aliases.forEach(alias -> { - if (alias.child() instanceof StDistance distance && canPushSpatialFunctionToSource(distance, lucenePushdownPredicates)) { + if (alias.child() instanceof StDistance distance && distance.translatable(lucenePushdownPredicates)) { distances.put(alias.id(), distance); } else if (alias.child() instanceof ReferenceAttribute ref && distances.containsKey(ref.id())) { StDistance distance = distances.get(ref.id()); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java index 8046d6bc56607..8c8460cc5e1ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/LucenePushdownPredicates.java @@ -10,7 +10,9 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; +import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.Check; import org.elasticsearch.xpack.esql.stats.SearchStats; /** @@ -60,8 +62,36 @@ default boolean isPushableFieldAttribute(Expression exp) { return false; } - default boolean isPushableMetadataAttribute(Expression exp) { - return exp instanceof MetadataAttribute ma && ma.name().equals(MetadataAttribute.SCORE); + static boolean isPushableTextFieldAttribute(Expression exp) { + return exp instanceof FieldAttribute fa && (fa.dataType() == 
DataType.TEXT || fa.dataType() == DataType.SEMANTIC_TEXT); + } + + static boolean isPushableMetadataAttribute(Expression exp) { + return exp instanceof MetadataAttribute ma && (ma.searchable() || ma.name().equals(MetadataAttribute.SCORE)); + } + + default boolean isPushableAttribute(Expression exp) { + return isPushableFieldAttribute(exp) || isPushableMetadataAttribute(exp); + } + + static TypedAttribute checkIsPushableAttribute(Expression e) { + Check.isTrue( + e instanceof FieldAttribute || e instanceof MetadataAttribute, + "Expected a FieldAttribute or MetadataAttribute but received [{}]", + e + ); + return (TypedAttribute) e; + } + + static FieldAttribute checkIsFieldAttribute(Expression e) { + Check.isTrue(e instanceof FieldAttribute, "Expected a FieldAttribute but received [{}] of type [{}]", e, e.getClass()); + return (FieldAttribute) e; + } + + static String pushableAttributeName(TypedAttribute attribute) { + return attribute instanceof FieldAttribute fa + ? fa.exactAttribute().name() // equality should always be against an exact match (which is important for strings) + : attribute.name(); } /** diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java index dae84bb6b6494..2f28b1a0e41ba 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushFiltersToSource.java @@ -8,54 +8,36 @@ package org.elasticsearch.xpack.esql.optimizer.rules.physical.local; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import 
org.elasticsearch.xpack.esql.core.expression.AttributeMap; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.UnaryScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; -import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardLike; import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.core.util.Queries; -import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.BinarySpatialFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import 
org.elasticsearch.xpack.esql.expression.predicate.Range; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveBinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.PhysicalOptimizerRules; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EvalExec; import org.elasticsearch.xpack.esql.plan.physical.FilterExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.util.ArrayList; import java.util.List; import static java.util.Arrays.asList; -import static org.elasticsearch.xpack.esql.core.expression.predicate.Predicates.splitAnd; +import static org.elasticsearch.xpack.esql.expression.predicate.Predicates.splitAnd; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; public class PushFiltersToSource extends PhysicalOptimizerRules.ParameterizedOptimizerRule { @@ -117,13 +99,14 @@ private static PhysicalPlan rewrite( // Combine GT, GTE, LT and LTE in pushable to Range if possible List newPushable = combineEligiblePushableToRange(pushable); if (newPushable.size() > 0) { // update the executable with pushable conditions - Query queryDSL = 
PlannerUtils.TRANSLATOR_HANDLER.asQuery(Predicates.combineAnd(newPushable)); + Query queryDSL = TRANSLATOR_HANDLER.asQuery(Predicates.combineAnd(newPushable)); QueryBuilder planQuery = queryDSL.asBuilder(); var query = Queries.combine(Queries.Clause.FILTER, asList(queryExec.query(), planQuery)); queryExec = new EsQueryExec( queryExec.source(), - queryExec.index(), + queryExec.indexPattern(), queryExec.indexMode(), + queryExec.indexNameWithModes(), queryExec.output(), query, queryExec.limit(), @@ -227,66 +210,6 @@ public static boolean canPushToSource(Expression exp) { } static boolean canPushToSource(Expression exp, LucenePushdownPredicates lucenePushdownPredicates) { - if (exp instanceof BinaryComparison bc) { - return isAttributePushable(bc.left(), bc, lucenePushdownPredicates) && bc.right().foldable(); - } else if (exp instanceof InsensitiveBinaryComparison bc) { - return isAttributePushable(bc.left(), bc, lucenePushdownPredicates) && bc.right().foldable(); - } else if (exp instanceof BinaryLogic bl) { - return canPushToSource(bl.left(), lucenePushdownPredicates) && canPushToSource(bl.right(), lucenePushdownPredicates); - } else if (exp instanceof In in) { - return isAttributePushable(in.value(), null, lucenePushdownPredicates) && Expressions.foldable(in.list()); - } else if (exp instanceof Not not) { - return canPushToSource(not.field(), lucenePushdownPredicates); - } else if (exp instanceof UnaryScalarFunction usf) { - if (usf instanceof RegexMatch || usf instanceof IsNull || usf instanceof IsNotNull) { - if (usf instanceof IsNull || usf instanceof IsNotNull) { - if (usf.field() instanceof FieldAttribute fa && fa.dataType().equals(DataType.TEXT)) { - return true; - } - } - return isAttributePushable(usf.field(), usf, lucenePushdownPredicates); - } - } else if (exp instanceof CIDRMatch cidrMatch) { - return isAttributePushable(cidrMatch.ipField(), cidrMatch, lucenePushdownPredicates) - && Expressions.foldable(cidrMatch.matches()); - } else if (exp instanceof 
SpatialRelatesFunction spatial) { - return canPushSpatialFunctionToSource(spatial, lucenePushdownPredicates); - } else if (exp instanceof FullTextFunction) { - return true; - } - return false; - } - - /** - * Push-down to Lucene is only possible if one field is an indexed spatial field, and the other is a constant spatial or string column. - */ - public static boolean canPushSpatialFunctionToSource(BinarySpatialFunction s, LucenePushdownPredicates lucenePushdownPredicates) { - // The use of foldable here instead of SpatialEvaluatorFieldKey.isConstant is intentional to match the behavior of the - // Lucene pushdown code in EsqlTranslationHandler::SpatialRelatesTranslator - // We could enhance both places to support ReferenceAttributes that refer to constants, but that is a larger change - return isPushableSpatialAttribute(s.left(), lucenePushdownPredicates) && s.right().foldable() - || isPushableSpatialAttribute(s.right(), lucenePushdownPredicates) && s.left().foldable(); - } - - private static boolean isPushableSpatialAttribute(Expression exp, LucenePushdownPredicates p) { - return exp instanceof FieldAttribute fa && DataType.isSpatial(fa.dataType()) && fa.getExactInfo().hasExact() && p.isIndexed(fa); - } - - private static boolean isAttributePushable( - Expression expression, - Expression operation, - LucenePushdownPredicates lucenePushdownPredicates - ) { - if (lucenePushdownPredicates.isPushableFieldAttribute(expression)) { - return true; - } - if (expression instanceof MetadataAttribute ma && ma.searchable()) { - return operation == null - // no range or regex queries supported with metadata fields - || operation instanceof Equals - || operation instanceof NotEquals - || operation instanceof WildcardLike; - } - return false; + return exp instanceof TranslationAware aware && aware.translatable(lucenePushdownPredicates); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java index 21bc360404628..adc6145ce2574 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushStatsToSource.java @@ -26,7 +26,6 @@ import org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.util.ArrayList; import java.util.List; @@ -36,6 +35,7 @@ import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushToSource; import static org.elasticsearch.xpack.esql.plan.physical.EsStatsQueryExec.StatsType.COUNT; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; /** * Looks for the case where certain stats exist right before the query and thus can be pushed down. 
@@ -59,7 +59,7 @@ protected PhysicalPlan rule(AggregateExec aggregateExec, LocalPhysicalOptimizerC if (tuple.v2().size() == aggregateExec.aggregates().size()) { plan = new EsStatsQueryExec( aggregateExec.source(), - queryExec.index(), + queryExec.indexPattern(), queryExec.query(), queryExec.limit(), tuple.v1(), @@ -106,7 +106,7 @@ private Tuple, List> pushableStats( if (canPushToSource(count.filter()) == false) { return null; // can't push down } - var countFilter = PlannerUtils.TRANSLATOR_HANDLER.asQuery(count.filter()); + var countFilter = TRANSLATOR_HANDLER.asQuery(count.filter()); query = Queries.combine(Queries.Clause.MUST, asList(countFilter.asBuilder(), query)); } return new EsStatsQueryExec.Stat(fieldName, COUNT, query); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java index 24df3c1db234e..86e14961df01a 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSource.java @@ -32,6 +32,7 @@ import java.util.ArrayList; import java.util.LinkedHashMap; import java.util.List; +import java.util.function.BiFunction; /** * We handle two main scenarios here: @@ -158,7 +159,7 @@ && canPushDownOrders(topNExec.order(), lucenePushdownPredicates)) { order.nullsPosition() ) ); - } else if (lucenePushdownPredicates.isPushableMetadataAttribute(order.child())) { + } else if (LucenePushdownPredicates.isPushableMetadataAttribute(order.child())) { pushableSorts.add(new EsQueryExec.ScoreSort(order.direction())); } else if (order.child() instanceof ReferenceAttribute referenceAttribute) { Attribute resolvedAttribute = aliasReplacedBy.resolve(referenceAttribute, referenceAttribute); @@ -197,11 +198,10 @@ && 
canPushDownOrders(topNExec.order(), lucenePushdownPredicates)) { private static boolean canPushDownOrders(List orders, LucenePushdownPredicates lucenePushdownPredicates) { // allow only exact FieldAttributes (no expressions) for sorting - return orders.stream() - .allMatch( - o -> lucenePushdownPredicates.isPushableFieldAttribute(o.child()) - || lucenePushdownPredicates.isPushableMetadataAttribute(o.child()) - ); + BiFunction isSortableAttribute = (exp, lpp) -> lpp.isPushableFieldAttribute(exp) + // TODO: https://github.com/elastic/elasticsearch/issues/120219 + || (exp instanceof MetadataAttribute ma && MetadataAttribute.SCORE.equals(ma.name())); + return orders.stream().allMatch(o -> isSortableAttribute.apply(o.child(), lucenePushdownPredicates)); } private static List buildFieldSorts(List orders) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java index 11e386ddd046c..4f3358c539b05 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/ReplaceSourceAttributes.java @@ -53,6 +53,6 @@ protected PhysicalPlan rule(EsSourceExec plan) { attributes.add(ma); } }); - return new EsQueryExec(plan.source(), plan.index(), plan.indexMode(), attributes, plan.query()); + return new EsQueryExec(plan.source(), plan.indexPattern(), plan.indexMode(), plan.indexNameWithModes(), attributes, plan.query()); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index c83fdbe8847a9..256bb094b45b7 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -70,6 +70,8 @@ null '%' null null +null +null ']' null null @@ -199,6 +201,8 @@ MINUS ASTERISK SLASH PERCENT +LEFT_BRACES +RIGHT_BRACES NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -339,6 +343,8 @@ MINUS ASTERISK SLASH PERCENT +LEFT_BRACES +RIGHT_BRACES NESTED_WHERE NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET @@ -498,4 +504,4 @@ METRICS_MODE CLOSING_METRICS_MODE atn: -[4, 0, 128, 1601, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 
93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 
1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 654, 8, 24, 11, 24, 12, 24, 655, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 664, 8, 25, 10, 25, 12, 25, 667, 9, 25, 1, 25, 3, 25, 670, 8, 25, 1, 25, 3, 25, 673, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 682, 8, 26, 10, 26, 12, 26, 685, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 693, 8, 27, 11, 27, 12, 27, 694, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 714, 8, 33, 1, 33, 4, 33, 717, 8, 33, 11, 33, 12, 33, 718, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 728, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 735, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 740, 8, 39, 
10, 39, 12, 39, 743, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 751, 8, 39, 10, 39, 12, 39, 754, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 761, 8, 39, 1, 39, 3, 39, 764, 8, 39, 3, 39, 766, 8, 39, 1, 40, 4, 40, 769, 8, 40, 11, 40, 12, 40, 770, 1, 41, 4, 41, 774, 8, 41, 11, 41, 12, 41, 775, 1, 41, 1, 41, 5, 41, 780, 8, 41, 10, 41, 12, 41, 783, 9, 41, 1, 41, 1, 41, 4, 41, 787, 8, 41, 11, 41, 12, 41, 788, 1, 41, 4, 41, 792, 8, 41, 11, 41, 12, 41, 793, 1, 41, 1, 41, 5, 41, 798, 8, 41, 10, 41, 12, 41, 801, 9, 41, 3, 41, 803, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 809, 8, 41, 11, 41, 12, 41, 810, 1, 41, 1, 41, 3, 41, 815, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 3, 79, 943, 8, 79, 1, 79, 5, 79, 946, 8, 79, 10, 79, 12, 79, 949, 9, 79, 1, 79, 1, 79, 4, 79, 953, 8, 79, 11, 79, 12, 79, 954, 3, 79, 957, 8, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 5, 82, 971, 8, 82, 10, 82, 12, 82, 974, 9, 82, 1, 82, 1, 82, 3, 82, 978, 8, 82, 1, 82, 4, 82, 981, 8, 82, 11, 82, 12, 82, 982, 3, 82, 985, 8, 82, 1, 83, 1, 83, 4, 83, 989, 8, 83, 11, 83, 12, 83, 990, 1, 83, 1, 83, 1, 84, 1, 84, 1, 85, 
1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 3, 100, 1068, 8, 100, 1, 101, 4, 101, 1071, 8, 101, 11, 101, 12, 101, 1072, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 3, 112, 1122, 8, 112, 1, 113, 1, 113, 3, 113, 1126, 8, 113, 1, 113, 5, 113, 1129, 8, 113, 10, 113, 12, 113, 1132, 9, 113, 1, 113, 1, 113, 3, 113, 1136, 8, 113, 1, 113, 4, 113, 1139, 8, 113, 11, 113, 12, 113, 1140, 3, 113, 1143, 8, 113, 1, 114, 1, 114, 4, 114, 1147, 8, 114, 11, 114, 12, 114, 1148, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 134, 4, 134, 1234, 8, 134, 11, 134, 12, 134, 1235, 1, 134, 1, 134, 3, 134, 1240, 8, 134, 1, 134, 
4, 134, 1243, 8, 134, 11, 134, 12, 134, 1244, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 4, 167, 1390, 8, 167, 11, 167, 12, 167, 1391, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 
190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 2, 683, 752, 0, 213, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 0, 174, 69, 176, 70, 178, 71, 180, 72, 182, 0, 184, 73, 186, 74, 188, 75, 190, 76, 192, 0, 194, 0, 196, 77, 198, 78, 200, 79, 202, 0, 204, 0, 206, 0, 208, 0, 210, 0, 212, 0, 214, 80, 216, 0, 218, 81, 220, 0, 222, 0, 224, 82, 226, 83, 228, 84, 230, 0, 232, 0, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 88, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 89, 266, 0, 268, 90, 270, 91, 272, 92, 274, 
0, 276, 0, 278, 93, 280, 94, 282, 0, 284, 95, 286, 0, 288, 96, 290, 97, 292, 98, 294, 0, 296, 0, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 99, 314, 100, 316, 101, 318, 0, 320, 0, 322, 0, 324, 0, 326, 0, 328, 0, 330, 102, 332, 103, 334, 104, 336, 0, 338, 105, 340, 106, 342, 107, 344, 108, 346, 0, 348, 0, 350, 109, 352, 110, 354, 111, 356, 112, 358, 0, 360, 0, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 113, 374, 114, 376, 115, 378, 0, 380, 0, 382, 0, 384, 0, 386, 116, 388, 117, 390, 118, 392, 0, 394, 0, 396, 0, 398, 0, 400, 119, 402, 0, 404, 0, 406, 120, 408, 121, 410, 122, 412, 0, 414, 0, 416, 0, 418, 123, 420, 124, 422, 125, 424, 0, 426, 0, 428, 126, 430, 127, 432, 128, 434, 0, 436, 0, 438, 0, 440, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1628, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 
0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 186, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 2, 192, 1, 0, 0, 0, 2, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 3, 202, 1, 0, 0, 0, 3, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 220, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 4, 230, 1, 0, 0, 0, 4, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 244, 1, 0, 0, 0, 4, 246, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 5, 252, 1, 0, 0, 0, 5, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 
0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 6, 274, 1, 0, 0, 0, 6, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 286, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 7, 294, 1, 0, 0, 0, 7, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 8, 318, 1, 0, 0, 0, 8, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 9, 336, 1, 0, 0, 0, 9, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 10, 346, 1, 0, 0, 0, 10, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 11, 358, 1, 0, 0, 0, 11, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 12, 378, 1, 0, 0, 0, 12, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 13, 392, 1, 0, 0, 0, 13, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 14, 412, 1, 0, 0, 0, 14, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 15, 424, 1, 0, 0, 0, 15, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 16, 442, 1, 0, 0, 0, 18, 452, 1, 0, 0, 0, 20, 459, 1, 0, 0, 0, 22, 468, 1, 0, 0, 0, 24, 475, 1, 0, 0, 0, 26, 485, 1, 0, 0, 0, 28, 492, 1, 0, 0, 
0, 30, 499, 1, 0, 0, 0, 32, 506, 1, 0, 0, 0, 34, 514, 1, 0, 0, 0, 36, 526, 1, 0, 0, 0, 38, 535, 1, 0, 0, 0, 40, 541, 1, 0, 0, 0, 42, 548, 1, 0, 0, 0, 44, 555, 1, 0, 0, 0, 46, 563, 1, 0, 0, 0, 48, 571, 1, 0, 0, 0, 50, 586, 1, 0, 0, 0, 52, 598, 1, 0, 0, 0, 54, 609, 1, 0, 0, 0, 56, 617, 1, 0, 0, 0, 58, 625, 1, 0, 0, 0, 60, 633, 1, 0, 0, 0, 62, 642, 1, 0, 0, 0, 64, 653, 1, 0, 0, 0, 66, 659, 1, 0, 0, 0, 68, 676, 1, 0, 0, 0, 70, 692, 1, 0, 0, 0, 72, 698, 1, 0, 0, 0, 74, 702, 1, 0, 0, 0, 76, 704, 1, 0, 0, 0, 78, 706, 1, 0, 0, 0, 80, 709, 1, 0, 0, 0, 82, 711, 1, 0, 0, 0, 84, 720, 1, 0, 0, 0, 86, 722, 1, 0, 0, 0, 88, 727, 1, 0, 0, 0, 90, 729, 1, 0, 0, 0, 92, 734, 1, 0, 0, 0, 94, 765, 1, 0, 0, 0, 96, 768, 1, 0, 0, 0, 98, 814, 1, 0, 0, 0, 100, 816, 1, 0, 0, 0, 102, 819, 1, 0, 0, 0, 104, 823, 1, 0, 0, 0, 106, 827, 1, 0, 0, 0, 108, 829, 1, 0, 0, 0, 110, 832, 1, 0, 0, 0, 112, 834, 1, 0, 0, 0, 114, 836, 1, 0, 0, 0, 116, 841, 1, 0, 0, 0, 118, 843, 1, 0, 0, 0, 120, 849, 1, 0, 0, 0, 122, 855, 1, 0, 0, 0, 124, 858, 1, 0, 0, 0, 126, 861, 1, 0, 0, 0, 128, 866, 1, 0, 0, 0, 130, 871, 1, 0, 0, 0, 132, 873, 1, 0, 0, 0, 134, 877, 1, 0, 0, 0, 136, 882, 1, 0, 0, 0, 138, 888, 1, 0, 0, 0, 140, 891, 1, 0, 0, 0, 142, 893, 1, 0, 0, 0, 144, 899, 1, 0, 0, 0, 146, 901, 1, 0, 0, 0, 148, 906, 1, 0, 0, 0, 150, 909, 1, 0, 0, 0, 152, 912, 1, 0, 0, 0, 154, 915, 1, 0, 0, 0, 156, 917, 1, 0, 0, 0, 158, 920, 1, 0, 0, 0, 160, 922, 1, 0, 0, 0, 162, 925, 1, 0, 0, 0, 164, 927, 1, 0, 0, 0, 166, 929, 1, 0, 0, 0, 168, 931, 1, 0, 0, 0, 170, 933, 1, 0, 0, 0, 172, 935, 1, 0, 0, 0, 174, 956, 1, 0, 0, 0, 176, 958, 1, 0, 0, 0, 178, 963, 1, 0, 0, 0, 180, 984, 1, 0, 0, 0, 182, 986, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1000, 1, 0, 0, 0, 190, 1004, 1, 0, 0, 0, 192, 1008, 1, 0, 0, 0, 194, 1013, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1022, 1, 0, 0, 0, 200, 1026, 1, 0, 0, 0, 202, 1030, 1, 0, 0, 0, 204, 1035, 1, 0, 0, 0, 206, 1039, 1, 0, 0, 0, 208, 1043, 1, 0, 0, 0, 210, 1047, 1, 0, 0, 0, 212, 1051, 1, 0, 
0, 0, 214, 1055, 1, 0, 0, 0, 216, 1067, 1, 0, 0, 0, 218, 1070, 1, 0, 0, 0, 220, 1074, 1, 0, 0, 0, 222, 1078, 1, 0, 0, 0, 224, 1082, 1, 0, 0, 0, 226, 1086, 1, 0, 0, 0, 228, 1090, 1, 0, 0, 0, 230, 1094, 1, 0, 0, 0, 232, 1099, 1, 0, 0, 0, 234, 1103, 1, 0, 0, 0, 236, 1107, 1, 0, 0, 0, 238, 1112, 1, 0, 0, 0, 240, 1121, 1, 0, 0, 0, 242, 1142, 1, 0, 0, 0, 244, 1146, 1, 0, 0, 0, 246, 1150, 1, 0, 0, 0, 248, 1154, 1, 0, 0, 0, 250, 1158, 1, 0, 0, 0, 252, 1162, 1, 0, 0, 0, 254, 1167, 1, 0, 0, 0, 256, 1171, 1, 0, 0, 0, 258, 1175, 1, 0, 0, 0, 260, 1179, 1, 0, 0, 0, 262, 1184, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1192, 1, 0, 0, 0, 268, 1196, 1, 0, 0, 0, 270, 1200, 1, 0, 0, 0, 272, 1204, 1, 0, 0, 0, 274, 1208, 1, 0, 0, 0, 276, 1213, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1230, 1, 0, 0, 0, 284, 1239, 1, 0, 0, 0, 286, 1246, 1, 0, 0, 0, 288, 1250, 1, 0, 0, 0, 290, 1254, 1, 0, 0, 0, 292, 1258, 1, 0, 0, 0, 294, 1262, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1276, 1, 0, 0, 0, 302, 1280, 1, 0, 0, 0, 304, 1284, 1, 0, 0, 0, 306, 1288, 1, 0, 0, 0, 308, 1292, 1, 0, 0, 0, 310, 1297, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1306, 1, 0, 0, 0, 316, 1310, 1, 0, 0, 0, 318, 1314, 1, 0, 0, 0, 320, 1319, 1, 0, 0, 0, 322, 1323, 1, 0, 0, 0, 324, 1328, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1337, 1, 0, 0, 0, 330, 1341, 1, 0, 0, 0, 332, 1345, 1, 0, 0, 0, 334, 1349, 1, 0, 0, 0, 336, 1353, 1, 0, 0, 0, 338, 1358, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1367, 1, 0, 0, 0, 344, 1371, 1, 0, 0, 0, 346, 1375, 1, 0, 0, 0, 348, 1380, 1, 0, 0, 0, 350, 1389, 1, 0, 0, 0, 352, 1393, 1, 0, 0, 0, 354, 1397, 1, 0, 0, 0, 356, 1401, 1, 0, 0, 0, 358, 1405, 1, 0, 0, 0, 360, 1410, 1, 0, 0, 0, 362, 1414, 1, 0, 0, 0, 364, 1418, 1, 0, 0, 0, 366, 1422, 1, 0, 0, 0, 368, 1427, 1, 0, 0, 0, 370, 1431, 1, 0, 0, 0, 372, 1435, 1, 0, 0, 0, 374, 1439, 1, 0, 0, 0, 376, 1443, 1, 0, 0, 0, 378, 1447, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1461, 1, 0, 0, 0, 386, 1465, 1, 
0, 0, 0, 388, 1469, 1, 0, 0, 0, 390, 1473, 1, 0, 0, 0, 392, 1477, 1, 0, 0, 0, 394, 1482, 1, 0, 0, 0, 396, 1486, 1, 0, 0, 0, 398, 1490, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1505, 1, 0, 0, 0, 404, 1509, 1, 0, 0, 0, 406, 1513, 1, 0, 0, 0, 408, 1517, 1, 0, 0, 0, 410, 1521, 1, 0, 0, 0, 412, 1525, 1, 0, 0, 0, 414, 1530, 1, 0, 0, 0, 416, 1536, 1, 0, 0, 0, 418, 1542, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1550, 1, 0, 0, 0, 424, 1554, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1566, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1574, 1, 0, 0, 0, 434, 1578, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1590, 1, 0, 0, 0, 440, 1596, 1, 0, 0, 0, 442, 443, 7, 0, 0, 0, 443, 444, 7, 1, 0, 0, 444, 445, 7, 2, 0, 0, 445, 446, 7, 2, 0, 0, 446, 447, 7, 3, 0, 0, 447, 448, 7, 4, 0, 0, 448, 449, 7, 5, 0, 0, 449, 450, 1, 0, 0, 0, 450, 451, 6, 0, 0, 0, 451, 17, 1, 0, 0, 0, 452, 453, 7, 0, 0, 0, 453, 454, 7, 6, 0, 0, 454, 455, 7, 7, 0, 0, 455, 456, 7, 8, 0, 0, 456, 457, 1, 0, 0, 0, 457, 458, 6, 1, 1, 0, 458, 19, 1, 0, 0, 0, 459, 460, 7, 3, 0, 0, 460, 461, 7, 9, 0, 0, 461, 462, 7, 6, 0, 0, 462, 463, 7, 1, 0, 0, 463, 464, 7, 4, 0, 0, 464, 465, 7, 10, 0, 0, 465, 466, 1, 0, 0, 0, 466, 467, 6, 2, 2, 0, 467, 21, 1, 0, 0, 0, 468, 469, 7, 3, 0, 0, 469, 470, 7, 11, 0, 0, 470, 471, 7, 12, 0, 0, 471, 472, 7, 13, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 6, 3, 0, 0, 474, 23, 1, 0, 0, 0, 475, 476, 7, 3, 0, 0, 476, 477, 7, 14, 0, 0, 477, 478, 7, 8, 0, 0, 478, 479, 7, 13, 0, 0, 479, 480, 7, 12, 0, 0, 480, 481, 7, 1, 0, 0, 481, 482, 7, 9, 0, 0, 482, 483, 1, 0, 0, 0, 483, 484, 6, 4, 3, 0, 484, 25, 1, 0, 0, 0, 485, 486, 7, 15, 0, 0, 486, 487, 7, 6, 0, 0, 487, 488, 7, 7, 0, 0, 488, 489, 7, 16, 0, 0, 489, 490, 1, 0, 0, 0, 490, 491, 6, 5, 4, 0, 491, 27, 1, 0, 0, 0, 492, 493, 7, 17, 0, 0, 493, 494, 7, 6, 0, 0, 494, 495, 7, 7, 0, 0, 495, 496, 7, 18, 0, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 6, 0, 0, 498, 29, 1, 0, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 7, 3, 0, 0, 501, 502, 7, 3, 0, 0, 502, 503, 7, 8, 0, 0, 503, 504, 1, 0, 0, 0, 
504, 505, 6, 7, 1, 0, 505, 31, 1, 0, 0, 0, 506, 507, 7, 13, 0, 0, 507, 508, 7, 1, 0, 0, 508, 509, 7, 16, 0, 0, 509, 510, 7, 1, 0, 0, 510, 511, 7, 5, 0, 0, 511, 512, 1, 0, 0, 0, 512, 513, 6, 8, 0, 0, 513, 33, 1, 0, 0, 0, 514, 515, 7, 16, 0, 0, 515, 516, 7, 11, 0, 0, 516, 517, 5, 95, 0, 0, 517, 518, 7, 3, 0, 0, 518, 519, 7, 14, 0, 0, 519, 520, 7, 8, 0, 0, 520, 521, 7, 12, 0, 0, 521, 522, 7, 9, 0, 0, 522, 523, 7, 0, 0, 0, 523, 524, 1, 0, 0, 0, 524, 525, 6, 9, 5, 0, 525, 35, 1, 0, 0, 0, 526, 527, 7, 6, 0, 0, 527, 528, 7, 3, 0, 0, 528, 529, 7, 9, 0, 0, 529, 530, 7, 12, 0, 0, 530, 531, 7, 16, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 1, 0, 0, 0, 533, 534, 6, 10, 6, 0, 534, 37, 1, 0, 0, 0, 535, 536, 7, 6, 0, 0, 536, 537, 7, 7, 0, 0, 537, 538, 7, 19, 0, 0, 538, 539, 1, 0, 0, 0, 539, 540, 6, 11, 0, 0, 540, 39, 1, 0, 0, 0, 541, 542, 7, 2, 0, 0, 542, 543, 7, 10, 0, 0, 543, 544, 7, 7, 0, 0, 544, 545, 7, 19, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 6, 12, 7, 0, 547, 41, 1, 0, 0, 0, 548, 549, 7, 2, 0, 0, 549, 550, 7, 7, 0, 0, 550, 551, 7, 6, 0, 0, 551, 552, 7, 5, 0, 0, 552, 553, 1, 0, 0, 0, 553, 554, 6, 13, 0, 0, 554, 43, 1, 0, 0, 0, 555, 556, 7, 2, 0, 0, 556, 557, 7, 5, 0, 0, 557, 558, 7, 12, 0, 0, 558, 559, 7, 5, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 1, 0, 0, 0, 561, 562, 6, 14, 0, 0, 562, 45, 1, 0, 0, 0, 563, 564, 7, 19, 0, 0, 564, 565, 7, 10, 0, 0, 565, 566, 7, 3, 0, 0, 566, 567, 7, 6, 0, 0, 567, 568, 7, 3, 0, 0, 568, 569, 1, 0, 0, 0, 569, 570, 6, 15, 0, 0, 570, 47, 1, 0, 0, 0, 571, 572, 4, 16, 0, 0, 572, 573, 7, 1, 0, 0, 573, 574, 7, 9, 0, 0, 574, 575, 7, 13, 0, 0, 575, 576, 7, 1, 0, 0, 576, 577, 7, 9, 0, 0, 577, 578, 7, 3, 0, 0, 578, 579, 7, 2, 0, 0, 579, 580, 7, 5, 0, 0, 580, 581, 7, 12, 0, 0, 581, 582, 7, 5, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 1, 0, 0, 0, 584, 585, 6, 16, 0, 0, 585, 49, 1, 0, 0, 0, 586, 587, 4, 17, 1, 0, 587, 588, 7, 13, 0, 0, 588, 589, 7, 7, 0, 0, 589, 590, 7, 7, 0, 0, 590, 591, 7, 18, 0, 0, 591, 592, 7, 20, 0, 0, 592, 593, 7, 8, 0, 0, 593, 594, 5, 95, 0, 0, 
594, 595, 5, 128020, 0, 0, 595, 596, 1, 0, 0, 0, 596, 597, 6, 17, 8, 0, 597, 51, 1, 0, 0, 0, 598, 599, 4, 18, 2, 0, 599, 600, 7, 16, 0, 0, 600, 601, 7, 3, 0, 0, 601, 602, 7, 5, 0, 0, 602, 603, 7, 6, 0, 0, 603, 604, 7, 1, 0, 0, 604, 605, 7, 4, 0, 0, 605, 606, 7, 2, 0, 0, 606, 607, 1, 0, 0, 0, 607, 608, 6, 18, 9, 0, 608, 53, 1, 0, 0, 0, 609, 610, 4, 19, 3, 0, 610, 611, 7, 21, 0, 0, 611, 612, 7, 7, 0, 0, 612, 613, 7, 1, 0, 0, 613, 614, 7, 9, 0, 0, 614, 615, 1, 0, 0, 0, 615, 616, 6, 19, 10, 0, 616, 55, 1, 0, 0, 0, 617, 618, 4, 20, 4, 0, 618, 619, 7, 15, 0, 0, 619, 620, 7, 20, 0, 0, 620, 621, 7, 13, 0, 0, 621, 622, 7, 13, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 6, 20, 10, 0, 624, 57, 1, 0, 0, 0, 625, 626, 4, 21, 5, 0, 626, 627, 7, 13, 0, 0, 627, 628, 7, 3, 0, 0, 628, 629, 7, 15, 0, 0, 629, 630, 7, 5, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 6, 21, 10, 0, 632, 59, 1, 0, 0, 0, 633, 634, 4, 22, 6, 0, 634, 635, 7, 6, 0, 0, 635, 636, 7, 1, 0, 0, 636, 637, 7, 17, 0, 0, 637, 638, 7, 10, 0, 0, 638, 639, 7, 5, 0, 0, 639, 640, 1, 0, 0, 0, 640, 641, 6, 22, 10, 0, 641, 61, 1, 0, 0, 0, 642, 643, 4, 23, 7, 0, 643, 644, 7, 13, 0, 0, 644, 645, 7, 7, 0, 0, 645, 646, 7, 7, 0, 0, 646, 647, 7, 18, 0, 0, 647, 648, 7, 20, 0, 0, 648, 649, 7, 8, 0, 0, 649, 650, 1, 0, 0, 0, 650, 651, 6, 23, 10, 0, 651, 63, 1, 0, 0, 0, 652, 654, 8, 22, 0, 0, 653, 652, 1, 0, 0, 0, 654, 655, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 657, 1, 0, 0, 0, 657, 658, 6, 24, 0, 0, 658, 65, 1, 0, 0, 0, 659, 660, 5, 47, 0, 0, 660, 661, 5, 47, 0, 0, 661, 665, 1, 0, 0, 0, 662, 664, 8, 23, 0, 0, 663, 662, 1, 0, 0, 0, 664, 667, 1, 0, 0, 0, 665, 663, 1, 0, 0, 0, 665, 666, 1, 0, 0, 0, 666, 669, 1, 0, 0, 0, 667, 665, 1, 0, 0, 0, 668, 670, 5, 13, 0, 0, 669, 668, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 672, 1, 0, 0, 0, 671, 673, 5, 10, 0, 0, 672, 671, 1, 0, 0, 0, 672, 673, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 675, 6, 25, 11, 0, 675, 67, 1, 0, 0, 0, 676, 677, 5, 47, 0, 0, 677, 678, 5, 42, 0, 0, 678, 683, 1, 0, 0, 0, 
679, 682, 3, 68, 26, 0, 680, 682, 9, 0, 0, 0, 681, 679, 1, 0, 0, 0, 681, 680, 1, 0, 0, 0, 682, 685, 1, 0, 0, 0, 683, 684, 1, 0, 0, 0, 683, 681, 1, 0, 0, 0, 684, 686, 1, 0, 0, 0, 685, 683, 1, 0, 0, 0, 686, 687, 5, 42, 0, 0, 687, 688, 5, 47, 0, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 26, 11, 0, 690, 69, 1, 0, 0, 0, 691, 693, 7, 24, 0, 0, 692, 691, 1, 0, 0, 0, 693, 694, 1, 0, 0, 0, 694, 692, 1, 0, 0, 0, 694, 695, 1, 0, 0, 0, 695, 696, 1, 0, 0, 0, 696, 697, 6, 27, 11, 0, 697, 71, 1, 0, 0, 0, 698, 699, 5, 124, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 28, 12, 0, 701, 73, 1, 0, 0, 0, 702, 703, 7, 25, 0, 0, 703, 75, 1, 0, 0, 0, 704, 705, 7, 26, 0, 0, 705, 77, 1, 0, 0, 0, 706, 707, 5, 92, 0, 0, 707, 708, 7, 27, 0, 0, 708, 79, 1, 0, 0, 0, 709, 710, 8, 28, 0, 0, 710, 81, 1, 0, 0, 0, 711, 713, 7, 3, 0, 0, 712, 714, 7, 29, 0, 0, 713, 712, 1, 0, 0, 0, 713, 714, 1, 0, 0, 0, 714, 716, 1, 0, 0, 0, 715, 717, 3, 74, 29, 0, 716, 715, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 716, 1, 0, 0, 0, 718, 719, 1, 0, 0, 0, 719, 83, 1, 0, 0, 0, 720, 721, 5, 64, 0, 0, 721, 85, 1, 0, 0, 0, 722, 723, 5, 96, 0, 0, 723, 87, 1, 0, 0, 0, 724, 728, 8, 30, 0, 0, 725, 726, 5, 96, 0, 0, 726, 728, 5, 96, 0, 0, 727, 724, 1, 0, 0, 0, 727, 725, 1, 0, 0, 0, 728, 89, 1, 0, 0, 0, 729, 730, 5, 95, 0, 0, 730, 91, 1, 0, 0, 0, 731, 735, 3, 76, 30, 0, 732, 735, 3, 74, 29, 0, 733, 735, 3, 90, 37, 0, 734, 731, 1, 0, 0, 0, 734, 732, 1, 0, 0, 0, 734, 733, 1, 0, 0, 0, 735, 93, 1, 0, 0, 0, 736, 741, 5, 34, 0, 0, 737, 740, 3, 78, 31, 0, 738, 740, 3, 80, 32, 0, 739, 737, 1, 0, 0, 0, 739, 738, 1, 0, 0, 0, 740, 743, 1, 0, 0, 0, 741, 739, 1, 0, 0, 0, 741, 742, 1, 0, 0, 0, 742, 744, 1, 0, 0, 0, 743, 741, 1, 0, 0, 0, 744, 766, 5, 34, 0, 0, 745, 746, 5, 34, 0, 0, 746, 747, 5, 34, 0, 0, 747, 748, 5, 34, 0, 0, 748, 752, 1, 0, 0, 0, 749, 751, 8, 23, 0, 0, 750, 749, 1, 0, 0, 0, 751, 754, 1, 0, 0, 0, 752, 753, 1, 0, 0, 0, 752, 750, 1, 0, 0, 0, 753, 755, 1, 0, 0, 0, 754, 752, 1, 0, 0, 0, 755, 756, 5, 34, 0, 0, 756, 757, 5, 34, 0, 0, 757, 758, 
5, 34, 0, 0, 758, 760, 1, 0, 0, 0, 759, 761, 5, 34, 0, 0, 760, 759, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 763, 1, 0, 0, 0, 762, 764, 5, 34, 0, 0, 763, 762, 1, 0, 0, 0, 763, 764, 1, 0, 0, 0, 764, 766, 1, 0, 0, 0, 765, 736, 1, 0, 0, 0, 765, 745, 1, 0, 0, 0, 766, 95, 1, 0, 0, 0, 767, 769, 3, 74, 29, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 97, 1, 0, 0, 0, 772, 774, 3, 74, 29, 0, 773, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 773, 1, 0, 0, 0, 775, 776, 1, 0, 0, 0, 776, 777, 1, 0, 0, 0, 777, 781, 3, 116, 50, 0, 778, 780, 3, 74, 29, 0, 779, 778, 1, 0, 0, 0, 780, 783, 1, 0, 0, 0, 781, 779, 1, 0, 0, 0, 781, 782, 1, 0, 0, 0, 782, 815, 1, 0, 0, 0, 783, 781, 1, 0, 0, 0, 784, 786, 3, 116, 50, 0, 785, 787, 3, 74, 29, 0, 786, 785, 1, 0, 0, 0, 787, 788, 1, 0, 0, 0, 788, 786, 1, 0, 0, 0, 788, 789, 1, 0, 0, 0, 789, 815, 1, 0, 0, 0, 790, 792, 3, 74, 29, 0, 791, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 791, 1, 0, 0, 0, 793, 794, 1, 0, 0, 0, 794, 802, 1, 0, 0, 0, 795, 799, 3, 116, 50, 0, 796, 798, 3, 74, 29, 0, 797, 796, 1, 0, 0, 0, 798, 801, 1, 0, 0, 0, 799, 797, 1, 0, 0, 0, 799, 800, 1, 0, 0, 0, 800, 803, 1, 0, 0, 0, 801, 799, 1, 0, 0, 0, 802, 795, 1, 0, 0, 0, 802, 803, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 805, 3, 82, 33, 0, 805, 815, 1, 0, 0, 0, 806, 808, 3, 116, 50, 0, 807, 809, 3, 74, 29, 0, 808, 807, 1, 0, 0, 0, 809, 810, 1, 0, 0, 0, 810, 808, 1, 0, 0, 0, 810, 811, 1, 0, 0, 0, 811, 812, 1, 0, 0, 0, 812, 813, 3, 82, 33, 0, 813, 815, 1, 0, 0, 0, 814, 773, 1, 0, 0, 0, 814, 784, 1, 0, 0, 0, 814, 791, 1, 0, 0, 0, 814, 806, 1, 0, 0, 0, 815, 99, 1, 0, 0, 0, 816, 817, 7, 31, 0, 0, 817, 818, 7, 32, 0, 0, 818, 101, 1, 0, 0, 0, 819, 820, 7, 12, 0, 0, 820, 821, 7, 9, 0, 0, 821, 822, 7, 0, 0, 0, 822, 103, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 2, 0, 0, 825, 826, 7, 4, 0, 0, 826, 105, 1, 0, 0, 0, 827, 828, 5, 61, 0, 0, 828, 107, 1, 0, 0, 0, 829, 830, 5, 58, 0, 0, 830, 831, 5, 58, 0, 0, 831, 109, 1, 0, 0, 0, 832, 833, 5, 
58, 0, 0, 833, 111, 1, 0, 0, 0, 834, 835, 5, 44, 0, 0, 835, 113, 1, 0, 0, 0, 836, 837, 7, 0, 0, 0, 837, 838, 7, 3, 0, 0, 838, 839, 7, 2, 0, 0, 839, 840, 7, 4, 0, 0, 840, 115, 1, 0, 0, 0, 841, 842, 5, 46, 0, 0, 842, 117, 1, 0, 0, 0, 843, 844, 7, 15, 0, 0, 844, 845, 7, 12, 0, 0, 845, 846, 7, 13, 0, 0, 846, 847, 7, 2, 0, 0, 847, 848, 7, 3, 0, 0, 848, 119, 1, 0, 0, 0, 849, 850, 7, 15, 0, 0, 850, 851, 7, 1, 0, 0, 851, 852, 7, 6, 0, 0, 852, 853, 7, 2, 0, 0, 853, 854, 7, 5, 0, 0, 854, 121, 1, 0, 0, 0, 855, 856, 7, 1, 0, 0, 856, 857, 7, 9, 0, 0, 857, 123, 1, 0, 0, 0, 858, 859, 7, 1, 0, 0, 859, 860, 7, 2, 0, 0, 860, 125, 1, 0, 0, 0, 861, 862, 7, 13, 0, 0, 862, 863, 7, 12, 0, 0, 863, 864, 7, 2, 0, 0, 864, 865, 7, 5, 0, 0, 865, 127, 1, 0, 0, 0, 866, 867, 7, 13, 0, 0, 867, 868, 7, 1, 0, 0, 868, 869, 7, 18, 0, 0, 869, 870, 7, 3, 0, 0, 870, 129, 1, 0, 0, 0, 871, 872, 5, 40, 0, 0, 872, 131, 1, 0, 0, 0, 873, 874, 7, 9, 0, 0, 874, 875, 7, 7, 0, 0, 875, 876, 7, 5, 0, 0, 876, 133, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 20, 0, 0, 879, 880, 7, 13, 0, 0, 880, 881, 7, 13, 0, 0, 881, 135, 1, 0, 0, 0, 882, 883, 7, 9, 0, 0, 883, 884, 7, 20, 0, 0, 884, 885, 7, 13, 0, 0, 885, 886, 7, 13, 0, 0, 886, 887, 7, 2, 0, 0, 887, 137, 1, 0, 0, 0, 888, 889, 7, 7, 0, 0, 889, 890, 7, 6, 0, 0, 890, 139, 1, 0, 0, 0, 891, 892, 5, 63, 0, 0, 892, 141, 1, 0, 0, 0, 893, 894, 7, 6, 0, 0, 894, 895, 7, 13, 0, 0, 895, 896, 7, 1, 0, 0, 896, 897, 7, 18, 0, 0, 897, 898, 7, 3, 0, 0, 898, 143, 1, 0, 0, 0, 899, 900, 5, 41, 0, 0, 900, 145, 1, 0, 0, 0, 901, 902, 7, 5, 0, 0, 902, 903, 7, 6, 0, 0, 903, 904, 7, 20, 0, 0, 904, 905, 7, 3, 0, 0, 905, 147, 1, 0, 0, 0, 906, 907, 5, 61, 0, 0, 907, 908, 5, 61, 0, 0, 908, 149, 1, 0, 0, 0, 909, 910, 5, 61, 0, 0, 910, 911, 5, 126, 0, 0, 911, 151, 1, 0, 0, 0, 912, 913, 5, 33, 0, 0, 913, 914, 5, 61, 0, 0, 914, 153, 1, 0, 0, 0, 915, 916, 5, 60, 0, 0, 916, 155, 1, 0, 0, 0, 917, 918, 5, 60, 0, 0, 918, 919, 5, 61, 0, 0, 919, 157, 1, 0, 0, 0, 920, 921, 5, 62, 0, 0, 921, 159, 1, 0, 0, 
0, 922, 923, 5, 62, 0, 0, 923, 924, 5, 61, 0, 0, 924, 161, 1, 0, 0, 0, 925, 926, 5, 43, 0, 0, 926, 163, 1, 0, 0, 0, 927, 928, 5, 45, 0, 0, 928, 165, 1, 0, 0, 0, 929, 930, 5, 42, 0, 0, 930, 167, 1, 0, 0, 0, 931, 932, 5, 47, 0, 0, 932, 169, 1, 0, 0, 0, 933, 934, 5, 37, 0, 0, 934, 171, 1, 0, 0, 0, 935, 936, 3, 46, 15, 0, 936, 937, 1, 0, 0, 0, 937, 938, 6, 78, 13, 0, 938, 173, 1, 0, 0, 0, 939, 942, 3, 140, 62, 0, 940, 943, 3, 76, 30, 0, 941, 943, 3, 90, 37, 0, 942, 940, 1, 0, 0, 0, 942, 941, 1, 0, 0, 0, 943, 947, 1, 0, 0, 0, 944, 946, 3, 92, 38, 0, 945, 944, 1, 0, 0, 0, 946, 949, 1, 0, 0, 0, 947, 945, 1, 0, 0, 0, 947, 948, 1, 0, 0, 0, 948, 957, 1, 0, 0, 0, 949, 947, 1, 0, 0, 0, 950, 952, 3, 140, 62, 0, 951, 953, 3, 74, 29, 0, 952, 951, 1, 0, 0, 0, 953, 954, 1, 0, 0, 0, 954, 952, 1, 0, 0, 0, 954, 955, 1, 0, 0, 0, 955, 957, 1, 0, 0, 0, 956, 939, 1, 0, 0, 0, 956, 950, 1, 0, 0, 0, 957, 175, 1, 0, 0, 0, 958, 959, 5, 91, 0, 0, 959, 960, 1, 0, 0, 0, 960, 961, 6, 80, 0, 0, 961, 962, 6, 80, 0, 0, 962, 177, 1, 0, 0, 0, 963, 964, 5, 93, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 81, 12, 0, 966, 967, 6, 81, 12, 0, 967, 179, 1, 0, 0, 0, 968, 972, 3, 76, 30, 0, 969, 971, 3, 92, 38, 0, 970, 969, 1, 0, 0, 0, 971, 974, 1, 0, 0, 0, 972, 970, 1, 0, 0, 0, 972, 973, 1, 0, 0, 0, 973, 985, 1, 0, 0, 0, 974, 972, 1, 0, 0, 0, 975, 978, 3, 90, 37, 0, 976, 978, 3, 84, 34, 0, 977, 975, 1, 0, 0, 0, 977, 976, 1, 0, 0, 0, 978, 980, 1, 0, 0, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 982, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 985, 1, 0, 0, 0, 984, 968, 1, 0, 0, 0, 984, 977, 1, 0, 0, 0, 985, 181, 1, 0, 0, 0, 986, 988, 3, 86, 35, 0, 987, 989, 3, 88, 36, 0, 988, 987, 1, 0, 0, 0, 989, 990, 1, 0, 0, 0, 990, 988, 1, 0, 0, 0, 990, 991, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 993, 3, 86, 35, 0, 993, 183, 1, 0, 0, 0, 994, 995, 3, 182, 83, 0, 995, 185, 1, 0, 0, 0, 996, 997, 3, 66, 25, 0, 997, 998, 1, 0, 0, 0, 998, 999, 6, 85, 11, 0, 999, 187, 1, 0, 0, 0, 1000, 1001, 3, 68, 26, 0, 
1001, 1002, 1, 0, 0, 0, 1002, 1003, 6, 86, 11, 0, 1003, 189, 1, 0, 0, 0, 1004, 1005, 3, 70, 27, 0, 1005, 1006, 1, 0, 0, 0, 1006, 1007, 6, 87, 11, 0, 1007, 191, 1, 0, 0, 0, 1008, 1009, 3, 176, 80, 0, 1009, 1010, 1, 0, 0, 0, 1010, 1011, 6, 88, 14, 0, 1011, 1012, 6, 88, 15, 0, 1012, 193, 1, 0, 0, 0, 1013, 1014, 3, 72, 28, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 89, 16, 0, 1016, 1017, 6, 89, 12, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 70, 27, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 11, 0, 1021, 197, 1, 0, 0, 0, 1022, 1023, 3, 66, 25, 0, 1023, 1024, 1, 0, 0, 0, 1024, 1025, 6, 91, 11, 0, 1025, 199, 1, 0, 0, 0, 1026, 1027, 3, 68, 26, 0, 1027, 1028, 1, 0, 0, 0, 1028, 1029, 6, 92, 11, 0, 1029, 201, 1, 0, 0, 0, 1030, 1031, 3, 72, 28, 0, 1031, 1032, 1, 0, 0, 0, 1032, 1033, 6, 93, 16, 0, 1033, 1034, 6, 93, 12, 0, 1034, 203, 1, 0, 0, 0, 1035, 1036, 3, 176, 80, 0, 1036, 1037, 1, 0, 0, 0, 1037, 1038, 6, 94, 14, 0, 1038, 205, 1, 0, 0, 0, 1039, 1040, 3, 178, 81, 0, 1040, 1041, 1, 0, 0, 0, 1041, 1042, 6, 95, 17, 0, 1042, 207, 1, 0, 0, 0, 1043, 1044, 3, 110, 47, 0, 1044, 1045, 1, 0, 0, 0, 1045, 1046, 6, 96, 18, 0, 1046, 209, 1, 0, 0, 0, 1047, 1048, 3, 112, 48, 0, 1048, 1049, 1, 0, 0, 0, 1049, 1050, 6, 97, 19, 0, 1050, 211, 1, 0, 0, 0, 1051, 1052, 3, 106, 45, 0, 1052, 1053, 1, 0, 0, 0, 1053, 1054, 6, 98, 20, 0, 1054, 213, 1, 0, 0, 0, 1055, 1056, 7, 16, 0, 0, 1056, 1057, 7, 3, 0, 0, 1057, 1058, 7, 5, 0, 0, 1058, 1059, 7, 12, 0, 0, 1059, 1060, 7, 0, 0, 0, 1060, 1061, 7, 12, 0, 0, 1061, 1062, 7, 5, 0, 0, 1062, 1063, 7, 12, 0, 0, 1063, 215, 1, 0, 0, 0, 1064, 1068, 8, 33, 0, 0, 1065, 1066, 5, 47, 0, 0, 1066, 1068, 8, 34, 0, 0, 1067, 1064, 1, 0, 0, 0, 1067, 1065, 1, 0, 0, 0, 1068, 217, 1, 0, 0, 0, 1069, 1071, 3, 216, 100, 0, 1070, 1069, 1, 0, 0, 0, 1071, 1072, 1, 0, 0, 0, 1072, 1070, 1, 0, 0, 0, 1072, 1073, 1, 0, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1075, 3, 218, 101, 0, 1075, 1076, 1, 0, 0, 0, 1076, 1077, 6, 102, 21, 0, 1077, 221, 1, 0, 0, 0, 1078, 1079, 3, 94, 39, 0, 1079, 1080, 1, 
0, 0, 0, 1080, 1081, 6, 103, 22, 0, 1081, 223, 1, 0, 0, 0, 1082, 1083, 3, 66, 25, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 104, 11, 0, 1085, 225, 1, 0, 0, 0, 1086, 1087, 3, 68, 26, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 105, 11, 0, 1089, 227, 1, 0, 0, 0, 1090, 1091, 3, 70, 27, 0, 1091, 1092, 1, 0, 0, 0, 1092, 1093, 6, 106, 11, 0, 1093, 229, 1, 0, 0, 0, 1094, 1095, 3, 72, 28, 0, 1095, 1096, 1, 0, 0, 0, 1096, 1097, 6, 107, 16, 0, 1097, 1098, 6, 107, 12, 0, 1098, 231, 1, 0, 0, 0, 1099, 1100, 3, 116, 50, 0, 1100, 1101, 1, 0, 0, 0, 1101, 1102, 6, 108, 23, 0, 1102, 233, 1, 0, 0, 0, 1103, 1104, 3, 112, 48, 0, 1104, 1105, 1, 0, 0, 0, 1105, 1106, 6, 109, 19, 0, 1106, 235, 1, 0, 0, 0, 1107, 1108, 4, 110, 8, 0, 1108, 1109, 3, 140, 62, 0, 1109, 1110, 1, 0, 0, 0, 1110, 1111, 6, 110, 24, 0, 1111, 237, 1, 0, 0, 0, 1112, 1113, 4, 111, 9, 0, 1113, 1114, 3, 174, 79, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 25, 0, 1116, 239, 1, 0, 0, 0, 1117, 1122, 3, 76, 30, 0, 1118, 1122, 3, 74, 29, 0, 1119, 1122, 3, 90, 37, 0, 1120, 1122, 3, 166, 75, 0, 1121, 1117, 1, 0, 0, 0, 1121, 1118, 1, 0, 0, 0, 1121, 1119, 1, 0, 0, 0, 1121, 1120, 1, 0, 0, 0, 1122, 241, 1, 0, 0, 0, 1123, 1126, 3, 76, 30, 0, 1124, 1126, 3, 166, 75, 0, 1125, 1123, 1, 0, 0, 0, 1125, 1124, 1, 0, 0, 0, 1126, 1130, 1, 0, 0, 0, 1127, 1129, 3, 240, 112, 0, 1128, 1127, 1, 0, 0, 0, 1129, 1132, 1, 0, 0, 0, 1130, 1128, 1, 0, 0, 0, 1130, 1131, 1, 0, 0, 0, 1131, 1143, 1, 0, 0, 0, 1132, 1130, 1, 0, 0, 0, 1133, 1136, 3, 90, 37, 0, 1134, 1136, 3, 84, 34, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1138, 1, 0, 0, 0, 1137, 1139, 3, 240, 112, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1140, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1143, 1, 0, 0, 0, 1142, 1125, 1, 0, 0, 0, 1142, 1135, 1, 0, 0, 0, 1143, 243, 1, 0, 0, 0, 1144, 1147, 3, 242, 113, 0, 1145, 1147, 3, 182, 83, 0, 1146, 1144, 1, 0, 0, 0, 1146, 1145, 1, 0, 0, 0, 1147, 1148, 1, 0, 0, 0, 1148, 1146, 1, 0, 0, 0, 1148, 1149, 1, 0, 0, 0, 1149, 245, 1, 0, 
0, 0, 1150, 1151, 3, 66, 25, 0, 1151, 1152, 1, 0, 0, 0, 1152, 1153, 6, 115, 11, 0, 1153, 247, 1, 0, 0, 0, 1154, 1155, 3, 68, 26, 0, 1155, 1156, 1, 0, 0, 0, 1156, 1157, 6, 116, 11, 0, 1157, 249, 1, 0, 0, 0, 1158, 1159, 3, 70, 27, 0, 1159, 1160, 1, 0, 0, 0, 1160, 1161, 6, 117, 11, 0, 1161, 251, 1, 0, 0, 0, 1162, 1163, 3, 72, 28, 0, 1163, 1164, 1, 0, 0, 0, 1164, 1165, 6, 118, 16, 0, 1165, 1166, 6, 118, 12, 0, 1166, 253, 1, 0, 0, 0, 1167, 1168, 3, 106, 45, 0, 1168, 1169, 1, 0, 0, 0, 1169, 1170, 6, 119, 20, 0, 1170, 255, 1, 0, 0, 0, 1171, 1172, 3, 112, 48, 0, 1172, 1173, 1, 0, 0, 0, 1173, 1174, 6, 120, 19, 0, 1174, 257, 1, 0, 0, 0, 1175, 1176, 3, 116, 50, 0, 1176, 1177, 1, 0, 0, 0, 1177, 1178, 6, 121, 23, 0, 1178, 259, 1, 0, 0, 0, 1179, 1180, 4, 122, 10, 0, 1180, 1181, 3, 140, 62, 0, 1181, 1182, 1, 0, 0, 0, 1182, 1183, 6, 122, 24, 0, 1183, 261, 1, 0, 0, 0, 1184, 1185, 4, 123, 11, 0, 1185, 1186, 3, 174, 79, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 25, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 7, 12, 0, 0, 1190, 1191, 7, 2, 0, 0, 1191, 265, 1, 0, 0, 0, 1192, 1193, 3, 244, 114, 0, 1193, 1194, 1, 0, 0, 0, 1194, 1195, 6, 125, 26, 0, 1195, 267, 1, 0, 0, 0, 1196, 1197, 3, 66, 25, 0, 1197, 1198, 1, 0, 0, 0, 1198, 1199, 6, 126, 11, 0, 1199, 269, 1, 0, 0, 0, 1200, 1201, 3, 68, 26, 0, 1201, 1202, 1, 0, 0, 0, 1202, 1203, 6, 127, 11, 0, 1203, 271, 1, 0, 0, 0, 1204, 1205, 3, 70, 27, 0, 1205, 1206, 1, 0, 0, 0, 1206, 1207, 6, 128, 11, 0, 1207, 273, 1, 0, 0, 0, 1208, 1209, 3, 72, 28, 0, 1209, 1210, 1, 0, 0, 0, 1210, 1211, 6, 129, 16, 0, 1211, 1212, 6, 129, 12, 0, 1212, 275, 1, 0, 0, 0, 1213, 1214, 3, 176, 80, 0, 1214, 1215, 1, 0, 0, 0, 1215, 1216, 6, 130, 14, 0, 1216, 1217, 6, 130, 27, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 7, 7, 0, 0, 1219, 1220, 7, 9, 0, 0, 1220, 1221, 1, 0, 0, 0, 1221, 1222, 6, 131, 28, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 7, 19, 0, 0, 1224, 1225, 7, 1, 0, 0, 1225, 1226, 7, 5, 0, 0, 1226, 1227, 7, 10, 0, 0, 1227, 1228, 1, 0, 0, 0, 1228, 1229, 6, 132, 28, 0, 1229, 
281, 1, 0, 0, 0, 1230, 1231, 8, 35, 0, 0, 1231, 283, 1, 0, 0, 0, 1232, 1234, 3, 282, 133, 0, 1233, 1232, 1, 0, 0, 0, 1234, 1235, 1, 0, 0, 0, 1235, 1233, 1, 0, 0, 0, 1235, 1236, 1, 0, 0, 0, 1236, 1237, 1, 0, 0, 0, 1237, 1238, 3, 110, 47, 0, 1238, 1240, 1, 0, 0, 0, 1239, 1233, 1, 0, 0, 0, 1239, 1240, 1, 0, 0, 0, 1240, 1242, 1, 0, 0, 0, 1241, 1243, 3, 282, 133, 0, 1242, 1241, 1, 0, 0, 0, 1243, 1244, 1, 0, 0, 0, 1244, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 285, 1, 0, 0, 0, 1246, 1247, 3, 284, 134, 0, 1247, 1248, 1, 0, 0, 0, 1248, 1249, 6, 135, 29, 0, 1249, 287, 1, 0, 0, 0, 1250, 1251, 3, 66, 25, 0, 1251, 1252, 1, 0, 0, 0, 1252, 1253, 6, 136, 11, 0, 1253, 289, 1, 0, 0, 0, 1254, 1255, 3, 68, 26, 0, 1255, 1256, 1, 0, 0, 0, 1256, 1257, 6, 137, 11, 0, 1257, 291, 1, 0, 0, 0, 1258, 1259, 3, 70, 27, 0, 1259, 1260, 1, 0, 0, 0, 1260, 1261, 6, 138, 11, 0, 1261, 293, 1, 0, 0, 0, 1262, 1263, 3, 72, 28, 0, 1263, 1264, 1, 0, 0, 0, 1264, 1265, 6, 139, 16, 0, 1265, 1266, 6, 139, 12, 0, 1266, 1267, 6, 139, 12, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 106, 45, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 20, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 112, 48, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 19, 0, 1275, 299, 1, 0, 0, 0, 1276, 1277, 3, 116, 50, 0, 1277, 1278, 1, 0, 0, 0, 1278, 1279, 6, 142, 23, 0, 1279, 301, 1, 0, 0, 0, 1280, 1281, 3, 280, 132, 0, 1281, 1282, 1, 0, 0, 0, 1282, 1283, 6, 143, 30, 0, 1283, 303, 1, 0, 0, 0, 1284, 1285, 3, 244, 114, 0, 1285, 1286, 1, 0, 0, 0, 1286, 1287, 6, 144, 26, 0, 1287, 305, 1, 0, 0, 0, 1288, 1289, 3, 184, 84, 0, 1289, 1290, 1, 0, 0, 0, 1290, 1291, 6, 145, 31, 0, 1291, 307, 1, 0, 0, 0, 1292, 1293, 4, 146, 12, 0, 1293, 1294, 3, 140, 62, 0, 1294, 1295, 1, 0, 0, 0, 1295, 1296, 6, 146, 24, 0, 1296, 309, 1, 0, 0, 0, 1297, 1298, 4, 147, 13, 0, 1298, 1299, 3, 174, 79, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 25, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 3, 66, 25, 0, 1303, 1304, 1, 0, 0, 0, 1304, 1305, 6, 148, 11, 0, 1305, 313, 1, 0, 0, 
0, 1306, 1307, 3, 68, 26, 0, 1307, 1308, 1, 0, 0, 0, 1308, 1309, 6, 149, 11, 0, 1309, 315, 1, 0, 0, 0, 1310, 1311, 3, 70, 27, 0, 1311, 1312, 1, 0, 0, 0, 1312, 1313, 6, 150, 11, 0, 1313, 317, 1, 0, 0, 0, 1314, 1315, 3, 72, 28, 0, 1315, 1316, 1, 0, 0, 0, 1316, 1317, 6, 151, 16, 0, 1317, 1318, 6, 151, 12, 0, 1318, 319, 1, 0, 0, 0, 1319, 1320, 3, 116, 50, 0, 1320, 1321, 1, 0, 0, 0, 1321, 1322, 6, 152, 23, 0, 1322, 321, 1, 0, 0, 0, 1323, 1324, 4, 153, 14, 0, 1324, 1325, 3, 140, 62, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 24, 0, 1327, 323, 1, 0, 0, 0, 1328, 1329, 4, 154, 15, 0, 1329, 1330, 3, 174, 79, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 25, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 3, 184, 84, 0, 1334, 1335, 1, 0, 0, 0, 1335, 1336, 6, 155, 31, 0, 1336, 327, 1, 0, 0, 0, 1337, 1338, 3, 180, 82, 0, 1338, 1339, 1, 0, 0, 0, 1339, 1340, 6, 156, 32, 0, 1340, 329, 1, 0, 0, 0, 1341, 1342, 3, 66, 25, 0, 1342, 1343, 1, 0, 0, 0, 1343, 1344, 6, 157, 11, 0, 1344, 331, 1, 0, 0, 0, 1345, 1346, 3, 68, 26, 0, 1346, 1347, 1, 0, 0, 0, 1347, 1348, 6, 158, 11, 0, 1348, 333, 1, 0, 0, 0, 1349, 1350, 3, 70, 27, 0, 1350, 1351, 1, 0, 0, 0, 1351, 1352, 6, 159, 11, 0, 1352, 335, 1, 0, 0, 0, 1353, 1354, 3, 72, 28, 0, 1354, 1355, 1, 0, 0, 0, 1355, 1356, 6, 160, 16, 0, 1356, 1357, 6, 160, 12, 0, 1357, 337, 1, 0, 0, 0, 1358, 1359, 7, 1, 0, 0, 1359, 1360, 7, 9, 0, 0, 1360, 1361, 7, 15, 0, 0, 1361, 1362, 7, 7, 0, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 66, 25, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 11, 0, 1366, 341, 1, 0, 0, 0, 1367, 1368, 3, 68, 26, 0, 1368, 1369, 1, 0, 0, 0, 1369, 1370, 6, 163, 11, 0, 1370, 343, 1, 0, 0, 0, 1371, 1372, 3, 70, 27, 0, 1372, 1373, 1, 0, 0, 0, 1373, 1374, 6, 164, 11, 0, 1374, 345, 1, 0, 0, 0, 1375, 1376, 3, 178, 81, 0, 1376, 1377, 1, 0, 0, 0, 1377, 1378, 6, 165, 17, 0, 1378, 1379, 6, 165, 12, 0, 1379, 347, 1, 0, 0, 0, 1380, 1381, 3, 110, 47, 0, 1381, 1382, 1, 0, 0, 0, 1382, 1383, 6, 166, 18, 0, 1383, 349, 1, 0, 0, 0, 1384, 1390, 3, 84, 34, 0, 1385, 
1390, 3, 74, 29, 0, 1386, 1390, 3, 116, 50, 0, 1387, 1390, 3, 76, 30, 0, 1388, 1390, 3, 90, 37, 0, 1389, 1384, 1, 0, 0, 0, 1389, 1385, 1, 0, 0, 0, 1389, 1386, 1, 0, 0, 0, 1389, 1387, 1, 0, 0, 0, 1389, 1388, 1, 0, 0, 0, 1390, 1391, 1, 0, 0, 0, 1391, 1389, 1, 0, 0, 0, 1391, 1392, 1, 0, 0, 0, 1392, 351, 1, 0, 0, 0, 1393, 1394, 3, 66, 25, 0, 1394, 1395, 1, 0, 0, 0, 1395, 1396, 6, 168, 11, 0, 1396, 353, 1, 0, 0, 0, 1397, 1398, 3, 68, 26, 0, 1398, 1399, 1, 0, 0, 0, 1399, 1400, 6, 169, 11, 0, 1400, 355, 1, 0, 0, 0, 1401, 1402, 3, 70, 27, 0, 1402, 1403, 1, 0, 0, 0, 1403, 1404, 6, 170, 11, 0, 1404, 357, 1, 0, 0, 0, 1405, 1406, 3, 72, 28, 0, 1406, 1407, 1, 0, 0, 0, 1407, 1408, 6, 171, 16, 0, 1408, 1409, 6, 171, 12, 0, 1409, 359, 1, 0, 0, 0, 1410, 1411, 3, 110, 47, 0, 1411, 1412, 1, 0, 0, 0, 1412, 1413, 6, 172, 18, 0, 1413, 361, 1, 0, 0, 0, 1414, 1415, 3, 112, 48, 0, 1415, 1416, 1, 0, 0, 0, 1416, 1417, 6, 173, 19, 0, 1417, 363, 1, 0, 0, 0, 1418, 1419, 3, 116, 50, 0, 1419, 1420, 1, 0, 0, 0, 1420, 1421, 6, 174, 23, 0, 1421, 365, 1, 0, 0, 0, 1422, 1423, 3, 278, 131, 0, 1423, 1424, 1, 0, 0, 0, 1424, 1425, 6, 175, 33, 0, 1425, 1426, 6, 175, 34, 0, 1426, 367, 1, 0, 0, 0, 1427, 1428, 3, 218, 101, 0, 1428, 1429, 1, 0, 0, 0, 1429, 1430, 6, 176, 21, 0, 1430, 369, 1, 0, 0, 0, 1431, 1432, 3, 94, 39, 0, 1432, 1433, 1, 0, 0, 0, 1433, 1434, 6, 177, 22, 0, 1434, 371, 1, 0, 0, 0, 1435, 1436, 3, 66, 25, 0, 1436, 1437, 1, 0, 0, 0, 1437, 1438, 6, 178, 11, 0, 1438, 373, 1, 0, 0, 0, 1439, 1440, 3, 68, 26, 0, 1440, 1441, 1, 0, 0, 0, 1441, 1442, 6, 179, 11, 0, 1442, 375, 1, 0, 0, 0, 1443, 1444, 3, 70, 27, 0, 1444, 1445, 1, 0, 0, 0, 1445, 1446, 6, 180, 11, 0, 1446, 377, 1, 0, 0, 0, 1447, 1448, 3, 72, 28, 0, 1448, 1449, 1, 0, 0, 0, 1449, 1450, 6, 181, 16, 0, 1450, 1451, 6, 181, 12, 0, 1451, 1452, 6, 181, 12, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 112, 48, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 19, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 116, 50, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 
183, 23, 0, 1460, 383, 1, 0, 0, 0, 1461, 1462, 3, 244, 114, 0, 1462, 1463, 1, 0, 0, 0, 1463, 1464, 6, 184, 26, 0, 1464, 385, 1, 0, 0, 0, 1465, 1466, 3, 66, 25, 0, 1466, 1467, 1, 0, 0, 0, 1467, 1468, 6, 185, 11, 0, 1468, 387, 1, 0, 0, 0, 1469, 1470, 3, 68, 26, 0, 1470, 1471, 1, 0, 0, 0, 1471, 1472, 6, 186, 11, 0, 1472, 389, 1, 0, 0, 0, 1473, 1474, 3, 70, 27, 0, 1474, 1475, 1, 0, 0, 0, 1475, 1476, 6, 187, 11, 0, 1476, 391, 1, 0, 0, 0, 1477, 1478, 3, 72, 28, 0, 1478, 1479, 1, 0, 0, 0, 1479, 1480, 6, 188, 16, 0, 1480, 1481, 6, 188, 12, 0, 1481, 393, 1, 0, 0, 0, 1482, 1483, 3, 54, 19, 0, 1483, 1484, 1, 0, 0, 0, 1484, 1485, 6, 189, 35, 0, 1485, 395, 1, 0, 0, 0, 1486, 1487, 3, 264, 124, 0, 1487, 1488, 1, 0, 0, 0, 1488, 1489, 6, 190, 36, 0, 1489, 397, 1, 0, 0, 0, 1490, 1491, 3, 278, 131, 0, 1491, 1492, 1, 0, 0, 0, 1492, 1493, 6, 191, 33, 0, 1493, 1494, 6, 191, 12, 0, 1494, 1495, 6, 191, 0, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 7, 20, 0, 0, 1497, 1498, 7, 2, 0, 0, 1498, 1499, 7, 1, 0, 0, 1499, 1500, 7, 9, 0, 0, 1500, 1501, 7, 17, 0, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 192, 12, 0, 1503, 1504, 6, 192, 0, 0, 1504, 401, 1, 0, 0, 0, 1505, 1506, 3, 180, 82, 0, 1506, 1507, 1, 0, 0, 0, 1507, 1508, 6, 193, 32, 0, 1508, 403, 1, 0, 0, 0, 1509, 1510, 3, 184, 84, 0, 1510, 1511, 1, 0, 0, 0, 1511, 1512, 6, 194, 31, 0, 1512, 405, 1, 0, 0, 0, 1513, 1514, 3, 66, 25, 0, 1514, 1515, 1, 0, 0, 0, 1515, 1516, 6, 195, 11, 0, 1516, 407, 1, 0, 0, 0, 1517, 1518, 3, 68, 26, 0, 1518, 1519, 1, 0, 0, 0, 1519, 1520, 6, 196, 11, 0, 1520, 409, 1, 0, 0, 0, 1521, 1522, 3, 70, 27, 0, 1522, 1523, 1, 0, 0, 0, 1523, 1524, 6, 197, 11, 0, 1524, 411, 1, 0, 0, 0, 1525, 1526, 3, 72, 28, 0, 1526, 1527, 1, 0, 0, 0, 1527, 1528, 6, 198, 16, 0, 1528, 1529, 6, 198, 12, 0, 1529, 413, 1, 0, 0, 0, 1530, 1531, 3, 218, 101, 0, 1531, 1532, 1, 0, 0, 0, 1532, 1533, 6, 199, 21, 0, 1533, 1534, 6, 199, 12, 0, 1534, 1535, 6, 199, 37, 0, 1535, 415, 1, 0, 0, 0, 1536, 1537, 3, 94, 39, 0, 1537, 1538, 1, 0, 0, 0, 1538, 1539, 6, 200, 
22, 0, 1539, 1540, 6, 200, 12, 0, 1540, 1541, 6, 200, 37, 0, 1541, 417, 1, 0, 0, 0, 1542, 1543, 3, 66, 25, 0, 1543, 1544, 1, 0, 0, 0, 1544, 1545, 6, 201, 11, 0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 68, 26, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 11, 0, 1549, 421, 1, 0, 0, 0, 1550, 1551, 3, 70, 27, 0, 1551, 1552, 1, 0, 0, 0, 1552, 1553, 6, 203, 11, 0, 1553, 423, 1, 0, 0, 0, 1554, 1555, 3, 110, 47, 0, 1555, 1556, 1, 0, 0, 0, 1556, 1557, 6, 204, 18, 0, 1557, 1558, 6, 204, 12, 0, 1558, 1559, 6, 204, 9, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 112, 48, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 19, 0, 1563, 1564, 6, 205, 12, 0, 1564, 1565, 6, 205, 9, 0, 1565, 427, 1, 0, 0, 0, 1566, 1567, 3, 66, 25, 0, 1567, 1568, 1, 0, 0, 0, 1568, 1569, 6, 206, 11, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 68, 26, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 11, 0, 1573, 431, 1, 0, 0, 0, 1574, 1575, 3, 70, 27, 0, 1575, 1576, 1, 0, 0, 0, 1576, 1577, 6, 208, 11, 0, 1577, 433, 1, 0, 0, 0, 1578, 1579, 3, 184, 84, 0, 1579, 1580, 1, 0, 0, 0, 1580, 1581, 6, 209, 12, 0, 1581, 1582, 6, 209, 0, 0, 1582, 1583, 6, 209, 31, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 180, 82, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 12, 0, 1587, 1588, 6, 210, 0, 0, 1588, 1589, 6, 210, 32, 0, 1589, 437, 1, 0, 0, 0, 1590, 1591, 3, 100, 42, 0, 1591, 1592, 1, 0, 0, 0, 1592, 1593, 6, 211, 12, 0, 1593, 1594, 6, 211, 0, 0, 1594, 1595, 6, 211, 38, 0, 1595, 439, 1, 0, 0, 0, 1596, 1597, 3, 72, 28, 0, 1597, 1598, 1, 0, 0, 0, 1598, 1599, 6, 212, 16, 0, 1599, 1600, 6, 212, 12, 0, 1600, 441, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 655, 665, 669, 672, 681, 683, 694, 713, 718, 727, 734, 739, 741, 752, 760, 763, 765, 770, 775, 781, 788, 793, 799, 802, 810, 814, 942, 947, 954, 956, 972, 977, 982, 984, 990, 1067, 1072, 1121, 1125, 1130, 1135, 1140, 1142, 1146, 1148, 1235, 1239, 1244, 1389, 1391, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 
5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 70, 0, 5, 0, 0, 7, 29, 0, 7, 71, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 81, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 69, 0, 7, 85, 0, 5, 10, 0, 5, 7, 0, 7, 95, 0, 7, 94, 0, 7, 73, 0, 7, 72, 0, 7, 93, 0, 5, 12, 0, 7, 20, 0, 7, 89, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 130, 1611, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 
110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 2, 143, 7, 143, 2, 144, 7, 144, 2, 145, 7, 145, 2, 146, 7, 146, 2, 147, 7, 147, 2, 148, 7, 148, 2, 149, 7, 149, 2, 150, 7, 150, 2, 151, 7, 151, 2, 152, 7, 152, 2, 153, 7, 153, 2, 154, 7, 154, 2, 155, 7, 155, 2, 156, 7, 156, 2, 157, 7, 157, 2, 158, 7, 158, 2, 159, 7, 159, 2, 160, 7, 160, 2, 161, 7, 161, 2, 162, 7, 162, 2, 163, 7, 163, 2, 164, 7, 164, 2, 165, 7, 165, 2, 166, 7, 166, 2, 167, 7, 167, 2, 168, 7, 168, 2, 169, 7, 169, 2, 170, 7, 170, 2, 171, 7, 171, 2, 172, 7, 172, 2, 173, 7, 173, 2, 174, 7, 174, 2, 175, 7, 175, 2, 176, 7, 176, 2, 177, 7, 177, 2, 178, 7, 178, 2, 179, 7, 179, 2, 180, 7, 180, 2, 181, 7, 181, 2, 182, 7, 182, 2, 183, 7, 183, 2, 184, 7, 184, 2, 185, 7, 185, 2, 186, 7, 186, 2, 187, 7, 187, 2, 188, 7, 188, 2, 189, 7, 189, 2, 190, 7, 190, 2, 191, 7, 191, 2, 192, 7, 192, 2, 193, 7, 193, 2, 194, 7, 194, 2, 195, 7, 195, 2, 196, 7, 196, 2, 197, 7, 197, 2, 198, 7, 198, 2, 199, 7, 199, 2, 200, 7, 200, 2, 201, 7, 201, 2, 202, 7, 202, 2, 203, 7, 203, 2, 204, 7, 204, 2, 205, 7, 205, 2, 206, 7, 206, 2, 207, 7, 207, 2, 208, 7, 208, 2, 209, 7, 209, 2, 210, 7, 210, 2, 211, 7, 211, 2, 212, 7, 212, 2, 213, 7, 213, 2, 214, 7, 214, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 
1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 4, 24, 658, 8, 24, 11, 24, 12, 24, 659, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 668, 8, 25, 10, 25, 12, 25, 671, 9, 25, 1, 25, 3, 25, 674, 8, 25, 1, 25, 3, 25, 677, 8, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 5, 26, 686, 8, 26, 10, 26, 12, 26, 689, 9, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 4, 27, 697, 8, 27, 11, 27, 12, 27, 698, 1, 27, 1, 27, 1, 28, 1, 28, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 32, 1, 32, 1, 33, 1, 33, 3, 33, 718, 8, 33, 1, 33, 4, 33, 721, 8, 33, 11, 33, 12, 33, 722, 1, 34, 1, 34, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 3, 36, 732, 8, 36, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 3, 38, 739, 8, 38, 1, 39, 1, 39, 1, 39, 5, 39, 744, 8, 39, 10, 39, 12, 39, 747, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 5, 39, 755, 8, 39, 10, 39, 12, 39, 758, 9, 39, 1, 39, 1, 39, 1, 39, 1, 39, 1, 39, 3, 39, 765, 8, 39, 1, 39, 3, 39, 768, 8, 39, 3, 39, 770, 8, 39, 1, 40, 4, 40, 
773, 8, 40, 11, 40, 12, 40, 774, 1, 41, 4, 41, 778, 8, 41, 11, 41, 12, 41, 779, 1, 41, 1, 41, 5, 41, 784, 8, 41, 10, 41, 12, 41, 787, 9, 41, 1, 41, 1, 41, 4, 41, 791, 8, 41, 11, 41, 12, 41, 792, 1, 41, 4, 41, 796, 8, 41, 11, 41, 12, 41, 797, 1, 41, 1, 41, 5, 41, 802, 8, 41, 10, 41, 12, 41, 805, 9, 41, 3, 41, 807, 8, 41, 1, 41, 1, 41, 1, 41, 1, 41, 4, 41, 813, 8, 41, 11, 41, 12, 41, 814, 1, 41, 1, 41, 3, 41, 819, 8, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 75, 1, 75, 1, 76, 1, 76, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 3, 81, 953, 8, 81, 1, 81, 5, 81, 956, 8, 81, 10, 81, 12, 81, 959, 9, 81, 1, 81, 1, 81, 4, 81, 963, 8, 81, 11, 81, 12, 81, 964, 3, 81, 967, 8, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 5, 84, 981, 8, 84, 10, 84, 12, 84, 984, 9, 84, 1, 84, 1, 84, 3, 84, 988, 8, 84, 1, 84, 4, 84, 991, 8, 84, 11, 84, 12, 84, 992, 3, 84, 995, 8, 84, 1, 85, 1, 85, 4, 85, 999, 8, 85, 11, 85, 12, 85, 1000, 1, 85, 1, 85, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 1, 87, 1, 88, 1, 88, 1, 88, 1, 88, 1, 89, 1, 89, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 
93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 1, 97, 1, 97, 1, 97, 1, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 3, 102, 1078, 8, 102, 1, 103, 4, 103, 1081, 8, 103, 11, 103, 12, 103, 1082, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 3, 114, 1132, 8, 114, 1, 115, 1, 115, 3, 115, 1136, 8, 115, 1, 115, 5, 115, 1139, 8, 115, 10, 115, 12, 115, 1142, 9, 115, 1, 115, 1, 115, 3, 115, 1146, 8, 115, 1, 115, 4, 115, 1149, 8, 115, 11, 115, 12, 115, 1150, 3, 115, 1153, 8, 115, 1, 116, 1, 116, 4, 116, 1157, 8, 116, 11, 116, 12, 116, 1158, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 136, 4, 136, 1244, 8, 136, 11, 136, 12, 136, 1245, 1, 136, 1, 136, 3, 136, 1250, 8, 136, 1, 136, 4, 136, 1253, 8, 136, 11, 136, 12, 136, 1254, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 
140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 1, 143, 1, 143, 1, 143, 1, 143, 1, 144, 1, 144, 1, 144, 1, 144, 1, 145, 1, 145, 1, 145, 1, 145, 1, 146, 1, 146, 1, 146, 1, 146, 1, 147, 1, 147, 1, 147, 1, 147, 1, 148, 1, 148, 1, 148, 1, 148, 1, 148, 1, 149, 1, 149, 1, 149, 1, 149, 1, 149, 1, 150, 1, 150, 1, 150, 1, 150, 1, 151, 1, 151, 1, 151, 1, 151, 1, 152, 1, 152, 1, 152, 1, 152, 1, 153, 1, 153, 1, 153, 1, 153, 1, 153, 1, 154, 1, 154, 1, 154, 1, 154, 1, 155, 1, 155, 1, 155, 1, 155, 1, 155, 1, 156, 1, 156, 1, 156, 1, 156, 1, 156, 1, 157, 1, 157, 1, 157, 1, 157, 1, 158, 1, 158, 1, 158, 1, 158, 1, 159, 1, 159, 1, 159, 1, 159, 1, 160, 1, 160, 1, 160, 1, 160, 1, 161, 1, 161, 1, 161, 1, 161, 1, 162, 1, 162, 1, 162, 1, 162, 1, 162, 1, 163, 1, 163, 1, 163, 1, 163, 1, 163, 1, 164, 1, 164, 1, 164, 1, 164, 1, 165, 1, 165, 1, 165, 1, 165, 1, 166, 1, 166, 1, 166, 1, 166, 1, 167, 1, 167, 1, 167, 1, 167, 1, 167, 1, 168, 1, 168, 1, 168, 1, 168, 1, 169, 1, 169, 1, 169, 1, 169, 1, 169, 4, 169, 1400, 8, 169, 11, 169, 12, 169, 1401, 1, 170, 1, 170, 1, 170, 1, 170, 1, 171, 1, 171, 1, 171, 1, 171, 1, 172, 1, 172, 1, 172, 1, 172, 1, 173, 1, 173, 1, 173, 1, 173, 1, 173, 1, 174, 1, 174, 1, 174, 1, 174, 1, 175, 1, 175, 1, 175, 1, 175, 1, 176, 1, 176, 1, 176, 1, 176, 1, 177, 1, 177, 1, 177, 1, 177, 1, 177, 1, 178, 1, 178, 1, 178, 1, 178, 1, 179, 1, 179, 1, 179, 1, 179, 1, 180, 1, 180, 1, 180, 1, 180, 1, 181, 1, 181, 1, 181, 1, 181, 1, 182, 1, 182, 1, 182, 1, 182, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 183, 1, 184, 1, 184, 1, 184, 1, 184, 1, 185, 1, 185, 1, 185, 1, 185, 1, 186, 1, 186, 1, 186, 1, 186, 1, 187, 1, 187, 1, 187, 1, 187, 1, 188, 1, 188, 1, 188, 1, 188, 1, 189, 1, 189, 1, 189, 1, 189, 1, 190, 1, 190, 1, 190, 1, 190, 1, 190, 1, 191, 1, 191, 1, 191, 1, 191, 1, 192, 1, 192, 1, 192, 1, 192, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 193, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 194, 1, 195, 1, 195, 1, 195, 1, 195, 1, 
196, 1, 196, 1, 196, 1, 196, 1, 197, 1, 197, 1, 197, 1, 197, 1, 198, 1, 198, 1, 198, 1, 198, 1, 199, 1, 199, 1, 199, 1, 199, 1, 200, 1, 200, 1, 200, 1, 200, 1, 200, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 201, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 202, 1, 203, 1, 203, 1, 203, 1, 203, 1, 204, 1, 204, 1, 204, 1, 204, 1, 205, 1, 205, 1, 205, 1, 205, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 206, 1, 207, 1, 207, 1, 207, 1, 207, 1, 207, 1, 207, 1, 208, 1, 208, 1, 208, 1, 208, 1, 209, 1, 209, 1, 209, 1, 209, 1, 210, 1, 210, 1, 210, 1, 210, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 211, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 212, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 213, 1, 214, 1, 214, 1, 214, 1, 214, 1, 214, 2, 687, 756, 0, 215, 16, 1, 18, 2, 20, 3, 22, 4, 24, 5, 26, 6, 28, 7, 30, 8, 32, 9, 34, 10, 36, 11, 38, 12, 40, 13, 42, 14, 44, 15, 46, 16, 48, 17, 50, 18, 52, 19, 54, 20, 56, 21, 58, 22, 60, 23, 62, 24, 64, 25, 66, 26, 68, 27, 70, 28, 72, 29, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 0, 88, 0, 90, 0, 92, 0, 94, 30, 96, 31, 98, 32, 100, 33, 102, 34, 104, 35, 106, 36, 108, 37, 110, 38, 112, 39, 114, 40, 116, 41, 118, 42, 120, 43, 122, 44, 124, 45, 126, 46, 128, 47, 130, 48, 132, 49, 134, 50, 136, 51, 138, 52, 140, 53, 142, 54, 144, 55, 146, 56, 148, 57, 150, 58, 152, 59, 154, 60, 156, 61, 158, 62, 160, 63, 162, 64, 164, 65, 166, 66, 168, 67, 170, 68, 172, 69, 174, 70, 176, 0, 178, 71, 180, 72, 182, 73, 184, 74, 186, 0, 188, 75, 190, 76, 192, 77, 194, 78, 196, 0, 198, 0, 200, 79, 202, 80, 204, 81, 206, 0, 208, 0, 210, 0, 212, 0, 214, 0, 216, 0, 218, 82, 220, 0, 222, 83, 224, 0, 226, 0, 228, 84, 230, 85, 232, 86, 234, 0, 236, 0, 238, 0, 240, 0, 242, 0, 244, 0, 246, 0, 248, 87, 250, 88, 252, 89, 254, 90, 256, 0, 258, 0, 260, 0, 262, 0, 264, 0, 266, 0, 268, 91, 270, 0, 272, 92, 274, 93, 276, 94, 278, 0, 280, 0, 282, 95, 284, 96, 286, 0, 288, 97, 290, 0, 292, 98, 294, 99, 296, 100, 298, 0, 300, 0, 302, 0, 304, 0, 306, 0, 308, 0, 310, 0, 312, 0, 
314, 0, 316, 101, 318, 102, 320, 103, 322, 0, 324, 0, 326, 0, 328, 0, 330, 0, 332, 0, 334, 104, 336, 105, 338, 106, 340, 0, 342, 107, 344, 108, 346, 109, 348, 110, 350, 0, 352, 0, 354, 111, 356, 112, 358, 113, 360, 114, 362, 0, 364, 0, 366, 0, 368, 0, 370, 0, 372, 0, 374, 0, 376, 115, 378, 116, 380, 117, 382, 0, 384, 0, 386, 0, 388, 0, 390, 118, 392, 119, 394, 120, 396, 0, 398, 0, 400, 0, 402, 0, 404, 121, 406, 0, 408, 0, 410, 122, 412, 123, 414, 124, 416, 0, 418, 0, 420, 0, 422, 125, 424, 126, 426, 127, 428, 0, 430, 0, 432, 128, 434, 129, 436, 130, 438, 0, 440, 0, 442, 0, 444, 0, 16, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 36, 2, 0, 68, 68, 100, 100, 2, 0, 73, 73, 105, 105, 2, 0, 83, 83, 115, 115, 2, 0, 69, 69, 101, 101, 2, 0, 67, 67, 99, 99, 2, 0, 84, 84, 116, 116, 2, 0, 82, 82, 114, 114, 2, 0, 79, 79, 111, 111, 2, 0, 80, 80, 112, 112, 2, 0, 78, 78, 110, 110, 2, 0, 72, 72, 104, 104, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 76, 76, 108, 108, 2, 0, 88, 88, 120, 120, 2, 0, 70, 70, 102, 102, 2, 0, 77, 77, 109, 109, 2, 0, 71, 71, 103, 103, 2, 0, 75, 75, 107, 107, 2, 0, 87, 87, 119, 119, 2, 0, 85, 85, 117, 117, 2, 0, 74, 74, 106, 106, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 8, 0, 34, 34, 78, 78, 82, 82, 84, 84, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 2, 0, 66, 66, 98, 98, 2, 0, 89, 89, 121, 121, 11, 0, 9, 10, 13, 13, 32, 32, 34, 34, 44, 44, 47, 47, 58, 58, 61, 61, 91, 91, 93, 93, 124, 124, 2, 0, 42, 42, 47, 47, 11, 0, 9, 10, 13, 13, 32, 32, 34, 35, 44, 44, 47, 47, 58, 58, 60, 60, 62, 63, 92, 92, 124, 124, 1638, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 
44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 0, 54, 1, 0, 0, 0, 0, 56, 1, 0, 0, 0, 0, 58, 1, 0, 0, 0, 0, 60, 1, 0, 0, 0, 0, 62, 1, 0, 0, 0, 0, 64, 1, 0, 0, 0, 0, 66, 1, 0, 0, 0, 0, 68, 1, 0, 0, 0, 0, 70, 1, 0, 0, 0, 1, 72, 1, 0, 0, 0, 1, 94, 1, 0, 0, 0, 1, 96, 1, 0, 0, 0, 1, 98, 1, 0, 0, 0, 1, 100, 1, 0, 0, 0, 1, 102, 1, 0, 0, 0, 1, 104, 1, 0, 0, 0, 1, 106, 1, 0, 0, 0, 1, 108, 1, 0, 0, 0, 1, 110, 1, 0, 0, 0, 1, 112, 1, 0, 0, 0, 1, 114, 1, 0, 0, 0, 1, 116, 1, 0, 0, 0, 1, 118, 1, 0, 0, 0, 1, 120, 1, 0, 0, 0, 1, 122, 1, 0, 0, 0, 1, 124, 1, 0, 0, 0, 1, 126, 1, 0, 0, 0, 1, 128, 1, 0, 0, 0, 1, 130, 1, 0, 0, 0, 1, 132, 1, 0, 0, 0, 1, 134, 1, 0, 0, 0, 1, 136, 1, 0, 0, 0, 1, 138, 1, 0, 0, 0, 1, 140, 1, 0, 0, 0, 1, 142, 1, 0, 0, 0, 1, 144, 1, 0, 0, 0, 1, 146, 1, 0, 0, 0, 1, 148, 1, 0, 0, 0, 1, 150, 1, 0, 0, 0, 1, 152, 1, 0, 0, 0, 1, 154, 1, 0, 0, 0, 1, 156, 1, 0, 0, 0, 1, 158, 1, 0, 0, 0, 1, 160, 1, 0, 0, 0, 1, 162, 1, 0, 0, 0, 1, 164, 1, 0, 0, 0, 1, 166, 1, 0, 0, 0, 1, 168, 1, 0, 0, 0, 1, 170, 1, 0, 0, 0, 1, 172, 1, 0, 0, 0, 1, 174, 1, 0, 0, 0, 1, 176, 1, 0, 0, 0, 1, 178, 1, 0, 0, 0, 1, 180, 1, 0, 0, 0, 1, 182, 1, 0, 0, 0, 1, 184, 1, 0, 0, 0, 1, 188, 1, 0, 0, 0, 1, 190, 1, 0, 0, 0, 1, 192, 1, 0, 0, 0, 1, 194, 1, 0, 0, 0, 2, 196, 1, 0, 0, 0, 2, 198, 1, 0, 0, 0, 2, 200, 1, 0, 0, 0, 2, 202, 1, 0, 0, 0, 2, 204, 1, 0, 0, 0, 3, 206, 1, 0, 0, 0, 3, 208, 1, 0, 0, 0, 3, 210, 1, 0, 0, 0, 3, 212, 1, 0, 0, 0, 3, 214, 1, 0, 0, 0, 3, 216, 1, 0, 0, 0, 3, 218, 1, 0, 0, 0, 3, 222, 1, 0, 0, 0, 3, 224, 1, 0, 0, 0, 3, 226, 1, 0, 0, 0, 3, 228, 1, 0, 0, 0, 3, 230, 1, 0, 0, 0, 3, 232, 1, 0, 0, 0, 4, 234, 1, 0, 0, 0, 4, 236, 1, 0, 0, 0, 4, 238, 1, 0, 0, 0, 4, 240, 1, 0, 0, 0, 4, 242, 1, 0, 0, 0, 4, 248, 1, 0, 0, 0, 4, 250, 1, 0, 0, 0, 4, 252, 1, 0, 0, 0, 4, 254, 1, 0, 0, 0, 5, 256, 1, 0, 0, 0, 5, 258, 1, 0, 0, 0, 5, 260, 1, 0, 0, 0, 5, 262, 1, 0, 0, 0, 5, 264, 1, 0, 0, 0, 5, 266, 1, 0, 0, 0, 5, 268, 1, 0, 0, 0, 5, 270, 1, 0, 0, 0, 5, 272, 1, 0, 0, 0, 
5, 274, 1, 0, 0, 0, 5, 276, 1, 0, 0, 0, 6, 278, 1, 0, 0, 0, 6, 280, 1, 0, 0, 0, 6, 282, 1, 0, 0, 0, 6, 284, 1, 0, 0, 0, 6, 288, 1, 0, 0, 0, 6, 290, 1, 0, 0, 0, 6, 292, 1, 0, 0, 0, 6, 294, 1, 0, 0, 0, 6, 296, 1, 0, 0, 0, 7, 298, 1, 0, 0, 0, 7, 300, 1, 0, 0, 0, 7, 302, 1, 0, 0, 0, 7, 304, 1, 0, 0, 0, 7, 306, 1, 0, 0, 0, 7, 308, 1, 0, 0, 0, 7, 310, 1, 0, 0, 0, 7, 312, 1, 0, 0, 0, 7, 314, 1, 0, 0, 0, 7, 316, 1, 0, 0, 0, 7, 318, 1, 0, 0, 0, 7, 320, 1, 0, 0, 0, 8, 322, 1, 0, 0, 0, 8, 324, 1, 0, 0, 0, 8, 326, 1, 0, 0, 0, 8, 328, 1, 0, 0, 0, 8, 330, 1, 0, 0, 0, 8, 332, 1, 0, 0, 0, 8, 334, 1, 0, 0, 0, 8, 336, 1, 0, 0, 0, 8, 338, 1, 0, 0, 0, 9, 340, 1, 0, 0, 0, 9, 342, 1, 0, 0, 0, 9, 344, 1, 0, 0, 0, 9, 346, 1, 0, 0, 0, 9, 348, 1, 0, 0, 0, 10, 350, 1, 0, 0, 0, 10, 352, 1, 0, 0, 0, 10, 354, 1, 0, 0, 0, 10, 356, 1, 0, 0, 0, 10, 358, 1, 0, 0, 0, 10, 360, 1, 0, 0, 0, 11, 362, 1, 0, 0, 0, 11, 364, 1, 0, 0, 0, 11, 366, 1, 0, 0, 0, 11, 368, 1, 0, 0, 0, 11, 370, 1, 0, 0, 0, 11, 372, 1, 0, 0, 0, 11, 374, 1, 0, 0, 0, 11, 376, 1, 0, 0, 0, 11, 378, 1, 0, 0, 0, 11, 380, 1, 0, 0, 0, 12, 382, 1, 0, 0, 0, 12, 384, 1, 0, 0, 0, 12, 386, 1, 0, 0, 0, 12, 388, 1, 0, 0, 0, 12, 390, 1, 0, 0, 0, 12, 392, 1, 0, 0, 0, 12, 394, 1, 0, 0, 0, 13, 396, 1, 0, 0, 0, 13, 398, 1, 0, 0, 0, 13, 400, 1, 0, 0, 0, 13, 402, 1, 0, 0, 0, 13, 404, 1, 0, 0, 0, 13, 406, 1, 0, 0, 0, 13, 408, 1, 0, 0, 0, 13, 410, 1, 0, 0, 0, 13, 412, 1, 0, 0, 0, 13, 414, 1, 0, 0, 0, 14, 416, 1, 0, 0, 0, 14, 418, 1, 0, 0, 0, 14, 420, 1, 0, 0, 0, 14, 422, 1, 0, 0, 0, 14, 424, 1, 0, 0, 0, 14, 426, 1, 0, 0, 0, 15, 428, 1, 0, 0, 0, 15, 430, 1, 0, 0, 0, 15, 432, 1, 0, 0, 0, 15, 434, 1, 0, 0, 0, 15, 436, 1, 0, 0, 0, 15, 438, 1, 0, 0, 0, 15, 440, 1, 0, 0, 0, 15, 442, 1, 0, 0, 0, 15, 444, 1, 0, 0, 0, 16, 446, 1, 0, 0, 0, 18, 456, 1, 0, 0, 0, 20, 463, 1, 0, 0, 0, 22, 472, 1, 0, 0, 0, 24, 479, 1, 0, 0, 0, 26, 489, 1, 0, 0, 0, 28, 496, 1, 0, 0, 0, 30, 503, 1, 0, 0, 0, 32, 510, 1, 0, 0, 0, 34, 518, 1, 0, 0, 0, 36, 530, 1, 0, 0, 0, 38, 539, 1, 0, 0, 0, 
40, 545, 1, 0, 0, 0, 42, 552, 1, 0, 0, 0, 44, 559, 1, 0, 0, 0, 46, 567, 1, 0, 0, 0, 48, 575, 1, 0, 0, 0, 50, 590, 1, 0, 0, 0, 52, 602, 1, 0, 0, 0, 54, 613, 1, 0, 0, 0, 56, 621, 1, 0, 0, 0, 58, 629, 1, 0, 0, 0, 60, 637, 1, 0, 0, 0, 62, 646, 1, 0, 0, 0, 64, 657, 1, 0, 0, 0, 66, 663, 1, 0, 0, 0, 68, 680, 1, 0, 0, 0, 70, 696, 1, 0, 0, 0, 72, 702, 1, 0, 0, 0, 74, 706, 1, 0, 0, 0, 76, 708, 1, 0, 0, 0, 78, 710, 1, 0, 0, 0, 80, 713, 1, 0, 0, 0, 82, 715, 1, 0, 0, 0, 84, 724, 1, 0, 0, 0, 86, 726, 1, 0, 0, 0, 88, 731, 1, 0, 0, 0, 90, 733, 1, 0, 0, 0, 92, 738, 1, 0, 0, 0, 94, 769, 1, 0, 0, 0, 96, 772, 1, 0, 0, 0, 98, 818, 1, 0, 0, 0, 100, 820, 1, 0, 0, 0, 102, 823, 1, 0, 0, 0, 104, 827, 1, 0, 0, 0, 106, 831, 1, 0, 0, 0, 108, 833, 1, 0, 0, 0, 110, 836, 1, 0, 0, 0, 112, 838, 1, 0, 0, 0, 114, 840, 1, 0, 0, 0, 116, 845, 1, 0, 0, 0, 118, 847, 1, 0, 0, 0, 120, 853, 1, 0, 0, 0, 122, 859, 1, 0, 0, 0, 124, 862, 1, 0, 0, 0, 126, 865, 1, 0, 0, 0, 128, 870, 1, 0, 0, 0, 130, 875, 1, 0, 0, 0, 132, 877, 1, 0, 0, 0, 134, 881, 1, 0, 0, 0, 136, 886, 1, 0, 0, 0, 138, 892, 1, 0, 0, 0, 140, 895, 1, 0, 0, 0, 142, 897, 1, 0, 0, 0, 144, 903, 1, 0, 0, 0, 146, 905, 1, 0, 0, 0, 148, 910, 1, 0, 0, 0, 150, 913, 1, 0, 0, 0, 152, 916, 1, 0, 0, 0, 154, 919, 1, 0, 0, 0, 156, 921, 1, 0, 0, 0, 158, 924, 1, 0, 0, 0, 160, 926, 1, 0, 0, 0, 162, 929, 1, 0, 0, 0, 164, 931, 1, 0, 0, 0, 166, 933, 1, 0, 0, 0, 168, 935, 1, 0, 0, 0, 170, 937, 1, 0, 0, 0, 172, 939, 1, 0, 0, 0, 174, 942, 1, 0, 0, 0, 176, 945, 1, 0, 0, 0, 178, 966, 1, 0, 0, 0, 180, 968, 1, 0, 0, 0, 182, 973, 1, 0, 0, 0, 184, 994, 1, 0, 0, 0, 186, 996, 1, 0, 0, 0, 188, 1004, 1, 0, 0, 0, 190, 1006, 1, 0, 0, 0, 192, 1010, 1, 0, 0, 0, 194, 1014, 1, 0, 0, 0, 196, 1018, 1, 0, 0, 0, 198, 1023, 1, 0, 0, 0, 200, 1028, 1, 0, 0, 0, 202, 1032, 1, 0, 0, 0, 204, 1036, 1, 0, 0, 0, 206, 1040, 1, 0, 0, 0, 208, 1045, 1, 0, 0, 0, 210, 1049, 1, 0, 0, 0, 212, 1053, 1, 0, 0, 0, 214, 1057, 1, 0, 0, 0, 216, 1061, 1, 0, 0, 0, 218, 1065, 1, 0, 0, 0, 220, 1077, 1, 0, 0, 0, 222, 1080, 
1, 0, 0, 0, 224, 1084, 1, 0, 0, 0, 226, 1088, 1, 0, 0, 0, 228, 1092, 1, 0, 0, 0, 230, 1096, 1, 0, 0, 0, 232, 1100, 1, 0, 0, 0, 234, 1104, 1, 0, 0, 0, 236, 1109, 1, 0, 0, 0, 238, 1113, 1, 0, 0, 0, 240, 1117, 1, 0, 0, 0, 242, 1122, 1, 0, 0, 0, 244, 1131, 1, 0, 0, 0, 246, 1152, 1, 0, 0, 0, 248, 1156, 1, 0, 0, 0, 250, 1160, 1, 0, 0, 0, 252, 1164, 1, 0, 0, 0, 254, 1168, 1, 0, 0, 0, 256, 1172, 1, 0, 0, 0, 258, 1177, 1, 0, 0, 0, 260, 1181, 1, 0, 0, 0, 262, 1185, 1, 0, 0, 0, 264, 1189, 1, 0, 0, 0, 266, 1194, 1, 0, 0, 0, 268, 1199, 1, 0, 0, 0, 270, 1202, 1, 0, 0, 0, 272, 1206, 1, 0, 0, 0, 274, 1210, 1, 0, 0, 0, 276, 1214, 1, 0, 0, 0, 278, 1218, 1, 0, 0, 0, 280, 1223, 1, 0, 0, 0, 282, 1228, 1, 0, 0, 0, 284, 1233, 1, 0, 0, 0, 286, 1240, 1, 0, 0, 0, 288, 1249, 1, 0, 0, 0, 290, 1256, 1, 0, 0, 0, 292, 1260, 1, 0, 0, 0, 294, 1264, 1, 0, 0, 0, 296, 1268, 1, 0, 0, 0, 298, 1272, 1, 0, 0, 0, 300, 1278, 1, 0, 0, 0, 302, 1282, 1, 0, 0, 0, 304, 1286, 1, 0, 0, 0, 306, 1290, 1, 0, 0, 0, 308, 1294, 1, 0, 0, 0, 310, 1298, 1, 0, 0, 0, 312, 1302, 1, 0, 0, 0, 314, 1307, 1, 0, 0, 0, 316, 1312, 1, 0, 0, 0, 318, 1316, 1, 0, 0, 0, 320, 1320, 1, 0, 0, 0, 322, 1324, 1, 0, 0, 0, 324, 1329, 1, 0, 0, 0, 326, 1333, 1, 0, 0, 0, 328, 1338, 1, 0, 0, 0, 330, 1343, 1, 0, 0, 0, 332, 1347, 1, 0, 0, 0, 334, 1351, 1, 0, 0, 0, 336, 1355, 1, 0, 0, 0, 338, 1359, 1, 0, 0, 0, 340, 1363, 1, 0, 0, 0, 342, 1368, 1, 0, 0, 0, 344, 1373, 1, 0, 0, 0, 346, 1377, 1, 0, 0, 0, 348, 1381, 1, 0, 0, 0, 350, 1385, 1, 0, 0, 0, 352, 1390, 1, 0, 0, 0, 354, 1399, 1, 0, 0, 0, 356, 1403, 1, 0, 0, 0, 358, 1407, 1, 0, 0, 0, 360, 1411, 1, 0, 0, 0, 362, 1415, 1, 0, 0, 0, 364, 1420, 1, 0, 0, 0, 366, 1424, 1, 0, 0, 0, 368, 1428, 1, 0, 0, 0, 370, 1432, 1, 0, 0, 0, 372, 1437, 1, 0, 0, 0, 374, 1441, 1, 0, 0, 0, 376, 1445, 1, 0, 0, 0, 378, 1449, 1, 0, 0, 0, 380, 1453, 1, 0, 0, 0, 382, 1457, 1, 0, 0, 0, 384, 1463, 1, 0, 0, 0, 386, 1467, 1, 0, 0, 0, 388, 1471, 1, 0, 0, 0, 390, 1475, 1, 0, 0, 0, 392, 1479, 1, 0, 0, 0, 394, 1483, 1, 0, 0, 0, 396, 
1487, 1, 0, 0, 0, 398, 1492, 1, 0, 0, 0, 400, 1496, 1, 0, 0, 0, 402, 1500, 1, 0, 0, 0, 404, 1506, 1, 0, 0, 0, 406, 1515, 1, 0, 0, 0, 408, 1519, 1, 0, 0, 0, 410, 1523, 1, 0, 0, 0, 412, 1527, 1, 0, 0, 0, 414, 1531, 1, 0, 0, 0, 416, 1535, 1, 0, 0, 0, 418, 1540, 1, 0, 0, 0, 420, 1546, 1, 0, 0, 0, 422, 1552, 1, 0, 0, 0, 424, 1556, 1, 0, 0, 0, 426, 1560, 1, 0, 0, 0, 428, 1564, 1, 0, 0, 0, 430, 1570, 1, 0, 0, 0, 432, 1576, 1, 0, 0, 0, 434, 1580, 1, 0, 0, 0, 436, 1584, 1, 0, 0, 0, 438, 1588, 1, 0, 0, 0, 440, 1594, 1, 0, 0, 0, 442, 1600, 1, 0, 0, 0, 444, 1606, 1, 0, 0, 0, 446, 447, 7, 0, 0, 0, 447, 448, 7, 1, 0, 0, 448, 449, 7, 2, 0, 0, 449, 450, 7, 2, 0, 0, 450, 451, 7, 3, 0, 0, 451, 452, 7, 4, 0, 0, 452, 453, 7, 5, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 0, 0, 0, 455, 17, 1, 0, 0, 0, 456, 457, 7, 0, 0, 0, 457, 458, 7, 6, 0, 0, 458, 459, 7, 7, 0, 0, 459, 460, 7, 8, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 6, 1, 1, 0, 462, 19, 1, 0, 0, 0, 463, 464, 7, 3, 0, 0, 464, 465, 7, 9, 0, 0, 465, 466, 7, 6, 0, 0, 466, 467, 7, 1, 0, 0, 467, 468, 7, 4, 0, 0, 468, 469, 7, 10, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 6, 2, 2, 0, 471, 21, 1, 0, 0, 0, 472, 473, 7, 3, 0, 0, 473, 474, 7, 11, 0, 0, 474, 475, 7, 12, 0, 0, 475, 476, 7, 13, 0, 0, 476, 477, 1, 0, 0, 0, 477, 478, 6, 3, 0, 0, 478, 23, 1, 0, 0, 0, 479, 480, 7, 3, 0, 0, 480, 481, 7, 14, 0, 0, 481, 482, 7, 8, 0, 0, 482, 483, 7, 13, 0, 0, 483, 484, 7, 12, 0, 0, 484, 485, 7, 1, 0, 0, 485, 486, 7, 9, 0, 0, 486, 487, 1, 0, 0, 0, 487, 488, 6, 4, 3, 0, 488, 25, 1, 0, 0, 0, 489, 490, 7, 15, 0, 0, 490, 491, 7, 6, 0, 0, 491, 492, 7, 7, 0, 0, 492, 493, 7, 16, 0, 0, 493, 494, 1, 0, 0, 0, 494, 495, 6, 5, 4, 0, 495, 27, 1, 0, 0, 0, 496, 497, 7, 17, 0, 0, 497, 498, 7, 6, 0, 0, 498, 499, 7, 7, 0, 0, 499, 500, 7, 18, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 6, 6, 0, 0, 502, 29, 1, 0, 0, 0, 503, 504, 7, 18, 0, 0, 504, 505, 7, 3, 0, 0, 505, 506, 7, 3, 0, 0, 506, 507, 7, 8, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 6, 7, 1, 0, 509, 31, 1, 0, 0, 0, 510, 511, 7, 13, 
0, 0, 511, 512, 7, 1, 0, 0, 512, 513, 7, 16, 0, 0, 513, 514, 7, 1, 0, 0, 514, 515, 7, 5, 0, 0, 515, 516, 1, 0, 0, 0, 516, 517, 6, 8, 0, 0, 517, 33, 1, 0, 0, 0, 518, 519, 7, 16, 0, 0, 519, 520, 7, 11, 0, 0, 520, 521, 5, 95, 0, 0, 521, 522, 7, 3, 0, 0, 522, 523, 7, 14, 0, 0, 523, 524, 7, 8, 0, 0, 524, 525, 7, 12, 0, 0, 525, 526, 7, 9, 0, 0, 526, 527, 7, 0, 0, 0, 527, 528, 1, 0, 0, 0, 528, 529, 6, 9, 5, 0, 529, 35, 1, 0, 0, 0, 530, 531, 7, 6, 0, 0, 531, 532, 7, 3, 0, 0, 532, 533, 7, 9, 0, 0, 533, 534, 7, 12, 0, 0, 534, 535, 7, 16, 0, 0, 535, 536, 7, 3, 0, 0, 536, 537, 1, 0, 0, 0, 537, 538, 6, 10, 6, 0, 538, 37, 1, 0, 0, 0, 539, 540, 7, 6, 0, 0, 540, 541, 7, 7, 0, 0, 541, 542, 7, 19, 0, 0, 542, 543, 1, 0, 0, 0, 543, 544, 6, 11, 0, 0, 544, 39, 1, 0, 0, 0, 545, 546, 7, 2, 0, 0, 546, 547, 7, 10, 0, 0, 547, 548, 7, 7, 0, 0, 548, 549, 7, 19, 0, 0, 549, 550, 1, 0, 0, 0, 550, 551, 6, 12, 7, 0, 551, 41, 1, 0, 0, 0, 552, 553, 7, 2, 0, 0, 553, 554, 7, 7, 0, 0, 554, 555, 7, 6, 0, 0, 555, 556, 7, 5, 0, 0, 556, 557, 1, 0, 0, 0, 557, 558, 6, 13, 0, 0, 558, 43, 1, 0, 0, 0, 559, 560, 7, 2, 0, 0, 560, 561, 7, 5, 0, 0, 561, 562, 7, 12, 0, 0, 562, 563, 7, 5, 0, 0, 563, 564, 7, 2, 0, 0, 564, 565, 1, 0, 0, 0, 565, 566, 6, 14, 0, 0, 566, 45, 1, 0, 0, 0, 567, 568, 7, 19, 0, 0, 568, 569, 7, 10, 0, 0, 569, 570, 7, 3, 0, 0, 570, 571, 7, 6, 0, 0, 571, 572, 7, 3, 0, 0, 572, 573, 1, 0, 0, 0, 573, 574, 6, 15, 0, 0, 574, 47, 1, 0, 0, 0, 575, 576, 4, 16, 0, 0, 576, 577, 7, 1, 0, 0, 577, 578, 7, 9, 0, 0, 578, 579, 7, 13, 0, 0, 579, 580, 7, 1, 0, 0, 580, 581, 7, 9, 0, 0, 581, 582, 7, 3, 0, 0, 582, 583, 7, 2, 0, 0, 583, 584, 7, 5, 0, 0, 584, 585, 7, 12, 0, 0, 585, 586, 7, 5, 0, 0, 586, 587, 7, 2, 0, 0, 587, 588, 1, 0, 0, 0, 588, 589, 6, 16, 0, 0, 589, 49, 1, 0, 0, 0, 590, 591, 4, 17, 1, 0, 591, 592, 7, 13, 0, 0, 592, 593, 7, 7, 0, 0, 593, 594, 7, 7, 0, 0, 594, 595, 7, 18, 0, 0, 595, 596, 7, 20, 0, 0, 596, 597, 7, 8, 0, 0, 597, 598, 5, 95, 0, 0, 598, 599, 5, 128020, 0, 0, 599, 600, 1, 0, 0, 0, 600, 601, 
6, 17, 8, 0, 601, 51, 1, 0, 0, 0, 602, 603, 4, 18, 2, 0, 603, 604, 7, 16, 0, 0, 604, 605, 7, 3, 0, 0, 605, 606, 7, 5, 0, 0, 606, 607, 7, 6, 0, 0, 607, 608, 7, 1, 0, 0, 608, 609, 7, 4, 0, 0, 609, 610, 7, 2, 0, 0, 610, 611, 1, 0, 0, 0, 611, 612, 6, 18, 9, 0, 612, 53, 1, 0, 0, 0, 613, 614, 4, 19, 3, 0, 614, 615, 7, 21, 0, 0, 615, 616, 7, 7, 0, 0, 616, 617, 7, 1, 0, 0, 617, 618, 7, 9, 0, 0, 618, 619, 1, 0, 0, 0, 619, 620, 6, 19, 10, 0, 620, 55, 1, 0, 0, 0, 621, 622, 4, 20, 4, 0, 622, 623, 7, 15, 0, 0, 623, 624, 7, 20, 0, 0, 624, 625, 7, 13, 0, 0, 625, 626, 7, 13, 0, 0, 626, 627, 1, 0, 0, 0, 627, 628, 6, 20, 10, 0, 628, 57, 1, 0, 0, 0, 629, 630, 4, 21, 5, 0, 630, 631, 7, 13, 0, 0, 631, 632, 7, 3, 0, 0, 632, 633, 7, 15, 0, 0, 633, 634, 7, 5, 0, 0, 634, 635, 1, 0, 0, 0, 635, 636, 6, 21, 10, 0, 636, 59, 1, 0, 0, 0, 637, 638, 4, 22, 6, 0, 638, 639, 7, 6, 0, 0, 639, 640, 7, 1, 0, 0, 640, 641, 7, 17, 0, 0, 641, 642, 7, 10, 0, 0, 642, 643, 7, 5, 0, 0, 643, 644, 1, 0, 0, 0, 644, 645, 6, 22, 10, 0, 645, 61, 1, 0, 0, 0, 646, 647, 4, 23, 7, 0, 647, 648, 7, 13, 0, 0, 648, 649, 7, 7, 0, 0, 649, 650, 7, 7, 0, 0, 650, 651, 7, 18, 0, 0, 651, 652, 7, 20, 0, 0, 652, 653, 7, 8, 0, 0, 653, 654, 1, 0, 0, 0, 654, 655, 6, 23, 10, 0, 655, 63, 1, 0, 0, 0, 656, 658, 8, 22, 0, 0, 657, 656, 1, 0, 0, 0, 658, 659, 1, 0, 0, 0, 659, 657, 1, 0, 0, 0, 659, 660, 1, 0, 0, 0, 660, 661, 1, 0, 0, 0, 661, 662, 6, 24, 0, 0, 662, 65, 1, 0, 0, 0, 663, 664, 5, 47, 0, 0, 664, 665, 5, 47, 0, 0, 665, 669, 1, 0, 0, 0, 666, 668, 8, 23, 0, 0, 667, 666, 1, 0, 0, 0, 668, 671, 1, 0, 0, 0, 669, 667, 1, 0, 0, 0, 669, 670, 1, 0, 0, 0, 670, 673, 1, 0, 0, 0, 671, 669, 1, 0, 0, 0, 672, 674, 5, 13, 0, 0, 673, 672, 1, 0, 0, 0, 673, 674, 1, 0, 0, 0, 674, 676, 1, 0, 0, 0, 675, 677, 5, 10, 0, 0, 676, 675, 1, 0, 0, 0, 676, 677, 1, 0, 0, 0, 677, 678, 1, 0, 0, 0, 678, 679, 6, 25, 11, 0, 679, 67, 1, 0, 0, 0, 680, 681, 5, 47, 0, 0, 681, 682, 5, 42, 0, 0, 682, 687, 1, 0, 0, 0, 683, 686, 3, 68, 26, 0, 684, 686, 9, 0, 0, 0, 685, 683, 1, 0, 
0, 0, 685, 684, 1, 0, 0, 0, 686, 689, 1, 0, 0, 0, 687, 688, 1, 0, 0, 0, 687, 685, 1, 0, 0, 0, 688, 690, 1, 0, 0, 0, 689, 687, 1, 0, 0, 0, 690, 691, 5, 42, 0, 0, 691, 692, 5, 47, 0, 0, 692, 693, 1, 0, 0, 0, 693, 694, 6, 26, 11, 0, 694, 69, 1, 0, 0, 0, 695, 697, 7, 24, 0, 0, 696, 695, 1, 0, 0, 0, 697, 698, 1, 0, 0, 0, 698, 696, 1, 0, 0, 0, 698, 699, 1, 0, 0, 0, 699, 700, 1, 0, 0, 0, 700, 701, 6, 27, 11, 0, 701, 71, 1, 0, 0, 0, 702, 703, 5, 124, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 6, 28, 12, 0, 705, 73, 1, 0, 0, 0, 706, 707, 7, 25, 0, 0, 707, 75, 1, 0, 0, 0, 708, 709, 7, 26, 0, 0, 709, 77, 1, 0, 0, 0, 710, 711, 5, 92, 0, 0, 711, 712, 7, 27, 0, 0, 712, 79, 1, 0, 0, 0, 713, 714, 8, 28, 0, 0, 714, 81, 1, 0, 0, 0, 715, 717, 7, 3, 0, 0, 716, 718, 7, 29, 0, 0, 717, 716, 1, 0, 0, 0, 717, 718, 1, 0, 0, 0, 718, 720, 1, 0, 0, 0, 719, 721, 3, 74, 29, 0, 720, 719, 1, 0, 0, 0, 721, 722, 1, 0, 0, 0, 722, 720, 1, 0, 0, 0, 722, 723, 1, 0, 0, 0, 723, 83, 1, 0, 0, 0, 724, 725, 5, 64, 0, 0, 725, 85, 1, 0, 0, 0, 726, 727, 5, 96, 0, 0, 727, 87, 1, 0, 0, 0, 728, 732, 8, 30, 0, 0, 729, 730, 5, 96, 0, 0, 730, 732, 5, 96, 0, 0, 731, 728, 1, 0, 0, 0, 731, 729, 1, 0, 0, 0, 732, 89, 1, 0, 0, 0, 733, 734, 5, 95, 0, 0, 734, 91, 1, 0, 0, 0, 735, 739, 3, 76, 30, 0, 736, 739, 3, 74, 29, 0, 737, 739, 3, 90, 37, 0, 738, 735, 1, 0, 0, 0, 738, 736, 1, 0, 0, 0, 738, 737, 1, 0, 0, 0, 739, 93, 1, 0, 0, 0, 740, 745, 5, 34, 0, 0, 741, 744, 3, 78, 31, 0, 742, 744, 3, 80, 32, 0, 743, 741, 1, 0, 0, 0, 743, 742, 1, 0, 0, 0, 744, 747, 1, 0, 0, 0, 745, 743, 1, 0, 0, 0, 745, 746, 1, 0, 0, 0, 746, 748, 1, 0, 0, 0, 747, 745, 1, 0, 0, 0, 748, 770, 5, 34, 0, 0, 749, 750, 5, 34, 0, 0, 750, 751, 5, 34, 0, 0, 751, 752, 5, 34, 0, 0, 752, 756, 1, 0, 0, 0, 753, 755, 8, 23, 0, 0, 754, 753, 1, 0, 0, 0, 755, 758, 1, 0, 0, 0, 756, 757, 1, 0, 0, 0, 756, 754, 1, 0, 0, 0, 757, 759, 1, 0, 0, 0, 758, 756, 1, 0, 0, 0, 759, 760, 5, 34, 0, 0, 760, 761, 5, 34, 0, 0, 761, 762, 5, 34, 0, 0, 762, 764, 1, 0, 0, 0, 763, 765, 5, 34, 0, 0, 
764, 763, 1, 0, 0, 0, 764, 765, 1, 0, 0, 0, 765, 767, 1, 0, 0, 0, 766, 768, 5, 34, 0, 0, 767, 766, 1, 0, 0, 0, 767, 768, 1, 0, 0, 0, 768, 770, 1, 0, 0, 0, 769, 740, 1, 0, 0, 0, 769, 749, 1, 0, 0, 0, 770, 95, 1, 0, 0, 0, 771, 773, 3, 74, 29, 0, 772, 771, 1, 0, 0, 0, 773, 774, 1, 0, 0, 0, 774, 772, 1, 0, 0, 0, 774, 775, 1, 0, 0, 0, 775, 97, 1, 0, 0, 0, 776, 778, 3, 74, 29, 0, 777, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 777, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 1, 0, 0, 0, 781, 785, 3, 116, 50, 0, 782, 784, 3, 74, 29, 0, 783, 782, 1, 0, 0, 0, 784, 787, 1, 0, 0, 0, 785, 783, 1, 0, 0, 0, 785, 786, 1, 0, 0, 0, 786, 819, 1, 0, 0, 0, 787, 785, 1, 0, 0, 0, 788, 790, 3, 116, 50, 0, 789, 791, 3, 74, 29, 0, 790, 789, 1, 0, 0, 0, 791, 792, 1, 0, 0, 0, 792, 790, 1, 0, 0, 0, 792, 793, 1, 0, 0, 0, 793, 819, 1, 0, 0, 0, 794, 796, 3, 74, 29, 0, 795, 794, 1, 0, 0, 0, 796, 797, 1, 0, 0, 0, 797, 795, 1, 0, 0, 0, 797, 798, 1, 0, 0, 0, 798, 806, 1, 0, 0, 0, 799, 803, 3, 116, 50, 0, 800, 802, 3, 74, 29, 0, 801, 800, 1, 0, 0, 0, 802, 805, 1, 0, 0, 0, 803, 801, 1, 0, 0, 0, 803, 804, 1, 0, 0, 0, 804, 807, 1, 0, 0, 0, 805, 803, 1, 0, 0, 0, 806, 799, 1, 0, 0, 0, 806, 807, 1, 0, 0, 0, 807, 808, 1, 0, 0, 0, 808, 809, 3, 82, 33, 0, 809, 819, 1, 0, 0, 0, 810, 812, 3, 116, 50, 0, 811, 813, 3, 74, 29, 0, 812, 811, 1, 0, 0, 0, 813, 814, 1, 0, 0, 0, 814, 812, 1, 0, 0, 0, 814, 815, 1, 0, 0, 0, 815, 816, 1, 0, 0, 0, 816, 817, 3, 82, 33, 0, 817, 819, 1, 0, 0, 0, 818, 777, 1, 0, 0, 0, 818, 788, 1, 0, 0, 0, 818, 795, 1, 0, 0, 0, 818, 810, 1, 0, 0, 0, 819, 99, 1, 0, 0, 0, 820, 821, 7, 31, 0, 0, 821, 822, 7, 32, 0, 0, 822, 101, 1, 0, 0, 0, 823, 824, 7, 12, 0, 0, 824, 825, 7, 9, 0, 0, 825, 826, 7, 0, 0, 0, 826, 103, 1, 0, 0, 0, 827, 828, 7, 12, 0, 0, 828, 829, 7, 2, 0, 0, 829, 830, 7, 4, 0, 0, 830, 105, 1, 0, 0, 0, 831, 832, 5, 61, 0, 0, 832, 107, 1, 0, 0, 0, 833, 834, 5, 58, 0, 0, 834, 835, 5, 58, 0, 0, 835, 109, 1, 0, 0, 0, 836, 837, 5, 58, 0, 0, 837, 111, 1, 0, 0, 0, 838, 839, 5, 44, 0, 0, 839, 
113, 1, 0, 0, 0, 840, 841, 7, 0, 0, 0, 841, 842, 7, 3, 0, 0, 842, 843, 7, 2, 0, 0, 843, 844, 7, 4, 0, 0, 844, 115, 1, 0, 0, 0, 845, 846, 5, 46, 0, 0, 846, 117, 1, 0, 0, 0, 847, 848, 7, 15, 0, 0, 848, 849, 7, 12, 0, 0, 849, 850, 7, 13, 0, 0, 850, 851, 7, 2, 0, 0, 851, 852, 7, 3, 0, 0, 852, 119, 1, 0, 0, 0, 853, 854, 7, 15, 0, 0, 854, 855, 7, 1, 0, 0, 855, 856, 7, 6, 0, 0, 856, 857, 7, 2, 0, 0, 857, 858, 7, 5, 0, 0, 858, 121, 1, 0, 0, 0, 859, 860, 7, 1, 0, 0, 860, 861, 7, 9, 0, 0, 861, 123, 1, 0, 0, 0, 862, 863, 7, 1, 0, 0, 863, 864, 7, 2, 0, 0, 864, 125, 1, 0, 0, 0, 865, 866, 7, 13, 0, 0, 866, 867, 7, 12, 0, 0, 867, 868, 7, 2, 0, 0, 868, 869, 7, 5, 0, 0, 869, 127, 1, 0, 0, 0, 870, 871, 7, 13, 0, 0, 871, 872, 7, 1, 0, 0, 872, 873, 7, 18, 0, 0, 873, 874, 7, 3, 0, 0, 874, 129, 1, 0, 0, 0, 875, 876, 5, 40, 0, 0, 876, 131, 1, 0, 0, 0, 877, 878, 7, 9, 0, 0, 878, 879, 7, 7, 0, 0, 879, 880, 7, 5, 0, 0, 880, 133, 1, 0, 0, 0, 881, 882, 7, 9, 0, 0, 882, 883, 7, 20, 0, 0, 883, 884, 7, 13, 0, 0, 884, 885, 7, 13, 0, 0, 885, 135, 1, 0, 0, 0, 886, 887, 7, 9, 0, 0, 887, 888, 7, 20, 0, 0, 888, 889, 7, 13, 0, 0, 889, 890, 7, 13, 0, 0, 890, 891, 7, 2, 0, 0, 891, 137, 1, 0, 0, 0, 892, 893, 7, 7, 0, 0, 893, 894, 7, 6, 0, 0, 894, 139, 1, 0, 0, 0, 895, 896, 5, 63, 0, 0, 896, 141, 1, 0, 0, 0, 897, 898, 7, 6, 0, 0, 898, 899, 7, 13, 0, 0, 899, 900, 7, 1, 0, 0, 900, 901, 7, 18, 0, 0, 901, 902, 7, 3, 0, 0, 902, 143, 1, 0, 0, 0, 903, 904, 5, 41, 0, 0, 904, 145, 1, 0, 0, 0, 905, 906, 7, 5, 0, 0, 906, 907, 7, 6, 0, 0, 907, 908, 7, 20, 0, 0, 908, 909, 7, 3, 0, 0, 909, 147, 1, 0, 0, 0, 910, 911, 5, 61, 0, 0, 911, 912, 5, 61, 0, 0, 912, 149, 1, 0, 0, 0, 913, 914, 5, 61, 0, 0, 914, 915, 5, 126, 0, 0, 915, 151, 1, 0, 0, 0, 916, 917, 5, 33, 0, 0, 917, 918, 5, 61, 0, 0, 918, 153, 1, 0, 0, 0, 919, 920, 5, 60, 0, 0, 920, 155, 1, 0, 0, 0, 921, 922, 5, 60, 0, 0, 922, 923, 5, 61, 0, 0, 923, 157, 1, 0, 0, 0, 924, 925, 5, 62, 0, 0, 925, 159, 1, 0, 0, 0, 926, 927, 5, 62, 0, 0, 927, 928, 5, 61, 0, 0, 928, 161, 1, 
0, 0, 0, 929, 930, 5, 43, 0, 0, 930, 163, 1, 0, 0, 0, 931, 932, 5, 45, 0, 0, 932, 165, 1, 0, 0, 0, 933, 934, 5, 42, 0, 0, 934, 167, 1, 0, 0, 0, 935, 936, 5, 47, 0, 0, 936, 169, 1, 0, 0, 0, 937, 938, 5, 37, 0, 0, 938, 171, 1, 0, 0, 0, 939, 940, 4, 78, 8, 0, 940, 941, 5, 123, 0, 0, 941, 173, 1, 0, 0, 0, 942, 943, 4, 79, 9, 0, 943, 944, 5, 125, 0, 0, 944, 175, 1, 0, 0, 0, 945, 946, 3, 46, 15, 0, 946, 947, 1, 0, 0, 0, 947, 948, 6, 80, 13, 0, 948, 177, 1, 0, 0, 0, 949, 952, 3, 140, 62, 0, 950, 953, 3, 76, 30, 0, 951, 953, 3, 90, 37, 0, 952, 950, 1, 0, 0, 0, 952, 951, 1, 0, 0, 0, 953, 957, 1, 0, 0, 0, 954, 956, 3, 92, 38, 0, 955, 954, 1, 0, 0, 0, 956, 959, 1, 0, 0, 0, 957, 955, 1, 0, 0, 0, 957, 958, 1, 0, 0, 0, 958, 967, 1, 0, 0, 0, 959, 957, 1, 0, 0, 0, 960, 962, 3, 140, 62, 0, 961, 963, 3, 74, 29, 0, 962, 961, 1, 0, 0, 0, 963, 964, 1, 0, 0, 0, 964, 962, 1, 0, 0, 0, 964, 965, 1, 0, 0, 0, 965, 967, 1, 0, 0, 0, 966, 949, 1, 0, 0, 0, 966, 960, 1, 0, 0, 0, 967, 179, 1, 0, 0, 0, 968, 969, 5, 91, 0, 0, 969, 970, 1, 0, 0, 0, 970, 971, 6, 82, 0, 0, 971, 972, 6, 82, 0, 0, 972, 181, 1, 0, 0, 0, 973, 974, 5, 93, 0, 0, 974, 975, 1, 0, 0, 0, 975, 976, 6, 83, 12, 0, 976, 977, 6, 83, 12, 0, 977, 183, 1, 0, 0, 0, 978, 982, 3, 76, 30, 0, 979, 981, 3, 92, 38, 0, 980, 979, 1, 0, 0, 0, 981, 984, 1, 0, 0, 0, 982, 980, 1, 0, 0, 0, 982, 983, 1, 0, 0, 0, 983, 995, 1, 0, 0, 0, 984, 982, 1, 0, 0, 0, 985, 988, 3, 90, 37, 0, 986, 988, 3, 84, 34, 0, 987, 985, 1, 0, 0, 0, 987, 986, 1, 0, 0, 0, 988, 990, 1, 0, 0, 0, 989, 991, 3, 92, 38, 0, 990, 989, 1, 0, 0, 0, 991, 992, 1, 0, 0, 0, 992, 990, 1, 0, 0, 0, 992, 993, 1, 0, 0, 0, 993, 995, 1, 0, 0, 0, 994, 978, 1, 0, 0, 0, 994, 987, 1, 0, 0, 0, 995, 185, 1, 0, 0, 0, 996, 998, 3, 86, 35, 0, 997, 999, 3, 88, 36, 0, 998, 997, 1, 0, 0, 0, 999, 1000, 1, 0, 0, 0, 1000, 998, 1, 0, 0, 0, 1000, 1001, 1, 0, 0, 0, 1001, 1002, 1, 0, 0, 0, 1002, 1003, 3, 86, 35, 0, 1003, 187, 1, 0, 0, 0, 1004, 1005, 3, 186, 85, 0, 1005, 189, 1, 0, 0, 0, 1006, 1007, 3, 66, 25, 0, 
1007, 1008, 1, 0, 0, 0, 1008, 1009, 6, 87, 11, 0, 1009, 191, 1, 0, 0, 0, 1010, 1011, 3, 68, 26, 0, 1011, 1012, 1, 0, 0, 0, 1012, 1013, 6, 88, 11, 0, 1013, 193, 1, 0, 0, 0, 1014, 1015, 3, 70, 27, 0, 1015, 1016, 1, 0, 0, 0, 1016, 1017, 6, 89, 11, 0, 1017, 195, 1, 0, 0, 0, 1018, 1019, 3, 180, 82, 0, 1019, 1020, 1, 0, 0, 0, 1020, 1021, 6, 90, 14, 0, 1021, 1022, 6, 90, 15, 0, 1022, 197, 1, 0, 0, 0, 1023, 1024, 3, 72, 28, 0, 1024, 1025, 1, 0, 0, 0, 1025, 1026, 6, 91, 16, 0, 1026, 1027, 6, 91, 12, 0, 1027, 199, 1, 0, 0, 0, 1028, 1029, 3, 70, 27, 0, 1029, 1030, 1, 0, 0, 0, 1030, 1031, 6, 92, 11, 0, 1031, 201, 1, 0, 0, 0, 1032, 1033, 3, 66, 25, 0, 1033, 1034, 1, 0, 0, 0, 1034, 1035, 6, 93, 11, 0, 1035, 203, 1, 0, 0, 0, 1036, 1037, 3, 68, 26, 0, 1037, 1038, 1, 0, 0, 0, 1038, 1039, 6, 94, 11, 0, 1039, 205, 1, 0, 0, 0, 1040, 1041, 3, 72, 28, 0, 1041, 1042, 1, 0, 0, 0, 1042, 1043, 6, 95, 16, 0, 1043, 1044, 6, 95, 12, 0, 1044, 207, 1, 0, 0, 0, 1045, 1046, 3, 180, 82, 0, 1046, 1047, 1, 0, 0, 0, 1047, 1048, 6, 96, 14, 0, 1048, 209, 1, 0, 0, 0, 1049, 1050, 3, 182, 83, 0, 1050, 1051, 1, 0, 0, 0, 1051, 1052, 6, 97, 17, 0, 1052, 211, 1, 0, 0, 0, 1053, 1054, 3, 110, 47, 0, 1054, 1055, 1, 0, 0, 0, 1055, 1056, 6, 98, 18, 0, 1056, 213, 1, 0, 0, 0, 1057, 1058, 3, 112, 48, 0, 1058, 1059, 1, 0, 0, 0, 1059, 1060, 6, 99, 19, 0, 1060, 215, 1, 0, 0, 0, 1061, 1062, 3, 106, 45, 0, 1062, 1063, 1, 0, 0, 0, 1063, 1064, 6, 100, 20, 0, 1064, 217, 1, 0, 0, 0, 1065, 1066, 7, 16, 0, 0, 1066, 1067, 7, 3, 0, 0, 1067, 1068, 7, 5, 0, 0, 1068, 1069, 7, 12, 0, 0, 1069, 1070, 7, 0, 0, 0, 1070, 1071, 7, 12, 0, 0, 1071, 1072, 7, 5, 0, 0, 1072, 1073, 7, 12, 0, 0, 1073, 219, 1, 0, 0, 0, 1074, 1078, 8, 33, 0, 0, 1075, 1076, 5, 47, 0, 0, 1076, 1078, 8, 34, 0, 0, 1077, 1074, 1, 0, 0, 0, 1077, 1075, 1, 0, 0, 0, 1078, 221, 1, 0, 0, 0, 1079, 1081, 3, 220, 102, 0, 1080, 1079, 1, 0, 0, 0, 1081, 1082, 1, 0, 0, 0, 1082, 1080, 1, 0, 0, 0, 1082, 1083, 1, 0, 0, 0, 1083, 223, 1, 0, 0, 0, 1084, 1085, 3, 222, 103, 0, 1085, 1086, 1, 
0, 0, 0, 1086, 1087, 6, 104, 21, 0, 1087, 225, 1, 0, 0, 0, 1088, 1089, 3, 94, 39, 0, 1089, 1090, 1, 0, 0, 0, 1090, 1091, 6, 105, 22, 0, 1091, 227, 1, 0, 0, 0, 1092, 1093, 3, 66, 25, 0, 1093, 1094, 1, 0, 0, 0, 1094, 1095, 6, 106, 11, 0, 1095, 229, 1, 0, 0, 0, 1096, 1097, 3, 68, 26, 0, 1097, 1098, 1, 0, 0, 0, 1098, 1099, 6, 107, 11, 0, 1099, 231, 1, 0, 0, 0, 1100, 1101, 3, 70, 27, 0, 1101, 1102, 1, 0, 0, 0, 1102, 1103, 6, 108, 11, 0, 1103, 233, 1, 0, 0, 0, 1104, 1105, 3, 72, 28, 0, 1105, 1106, 1, 0, 0, 0, 1106, 1107, 6, 109, 16, 0, 1107, 1108, 6, 109, 12, 0, 1108, 235, 1, 0, 0, 0, 1109, 1110, 3, 116, 50, 0, 1110, 1111, 1, 0, 0, 0, 1111, 1112, 6, 110, 23, 0, 1112, 237, 1, 0, 0, 0, 1113, 1114, 3, 112, 48, 0, 1114, 1115, 1, 0, 0, 0, 1115, 1116, 6, 111, 19, 0, 1116, 239, 1, 0, 0, 0, 1117, 1118, 4, 112, 10, 0, 1118, 1119, 3, 140, 62, 0, 1119, 1120, 1, 0, 0, 0, 1120, 1121, 6, 112, 24, 0, 1121, 241, 1, 0, 0, 0, 1122, 1123, 4, 113, 11, 0, 1123, 1124, 3, 178, 81, 0, 1124, 1125, 1, 0, 0, 0, 1125, 1126, 6, 113, 25, 0, 1126, 243, 1, 0, 0, 0, 1127, 1132, 3, 76, 30, 0, 1128, 1132, 3, 74, 29, 0, 1129, 1132, 3, 90, 37, 0, 1130, 1132, 3, 166, 75, 0, 1131, 1127, 1, 0, 0, 0, 1131, 1128, 1, 0, 0, 0, 1131, 1129, 1, 0, 0, 0, 1131, 1130, 1, 0, 0, 0, 1132, 245, 1, 0, 0, 0, 1133, 1136, 3, 76, 30, 0, 1134, 1136, 3, 166, 75, 0, 1135, 1133, 1, 0, 0, 0, 1135, 1134, 1, 0, 0, 0, 1136, 1140, 1, 0, 0, 0, 1137, 1139, 3, 244, 114, 0, 1138, 1137, 1, 0, 0, 0, 1139, 1142, 1, 0, 0, 0, 1140, 1138, 1, 0, 0, 0, 1140, 1141, 1, 0, 0, 0, 1141, 1153, 1, 0, 0, 0, 1142, 1140, 1, 0, 0, 0, 1143, 1146, 3, 90, 37, 0, 1144, 1146, 3, 84, 34, 0, 1145, 1143, 1, 0, 0, 0, 1145, 1144, 1, 0, 0, 0, 1146, 1148, 1, 0, 0, 0, 1147, 1149, 3, 244, 114, 0, 1148, 1147, 1, 0, 0, 0, 1149, 1150, 1, 0, 0, 0, 1150, 1148, 1, 0, 0, 0, 1150, 1151, 1, 0, 0, 0, 1151, 1153, 1, 0, 0, 0, 1152, 1135, 1, 0, 0, 0, 1152, 1145, 1, 0, 0, 0, 1153, 247, 1, 0, 0, 0, 1154, 1157, 3, 246, 115, 0, 1155, 1157, 3, 186, 85, 0, 1156, 1154, 1, 0, 0, 0, 1156, 1155, 
1, 0, 0, 0, 1157, 1158, 1, 0, 0, 0, 1158, 1156, 1, 0, 0, 0, 1158, 1159, 1, 0, 0, 0, 1159, 249, 1, 0, 0, 0, 1160, 1161, 3, 66, 25, 0, 1161, 1162, 1, 0, 0, 0, 1162, 1163, 6, 117, 11, 0, 1163, 251, 1, 0, 0, 0, 1164, 1165, 3, 68, 26, 0, 1165, 1166, 1, 0, 0, 0, 1166, 1167, 6, 118, 11, 0, 1167, 253, 1, 0, 0, 0, 1168, 1169, 3, 70, 27, 0, 1169, 1170, 1, 0, 0, 0, 1170, 1171, 6, 119, 11, 0, 1171, 255, 1, 0, 0, 0, 1172, 1173, 3, 72, 28, 0, 1173, 1174, 1, 0, 0, 0, 1174, 1175, 6, 120, 16, 0, 1175, 1176, 6, 120, 12, 0, 1176, 257, 1, 0, 0, 0, 1177, 1178, 3, 106, 45, 0, 1178, 1179, 1, 0, 0, 0, 1179, 1180, 6, 121, 20, 0, 1180, 259, 1, 0, 0, 0, 1181, 1182, 3, 112, 48, 0, 1182, 1183, 1, 0, 0, 0, 1183, 1184, 6, 122, 19, 0, 1184, 261, 1, 0, 0, 0, 1185, 1186, 3, 116, 50, 0, 1186, 1187, 1, 0, 0, 0, 1187, 1188, 6, 123, 23, 0, 1188, 263, 1, 0, 0, 0, 1189, 1190, 4, 124, 12, 0, 1190, 1191, 3, 140, 62, 0, 1191, 1192, 1, 0, 0, 0, 1192, 1193, 6, 124, 24, 0, 1193, 265, 1, 0, 0, 0, 1194, 1195, 4, 125, 13, 0, 1195, 1196, 3, 178, 81, 0, 1196, 1197, 1, 0, 0, 0, 1197, 1198, 6, 125, 25, 0, 1198, 267, 1, 0, 0, 0, 1199, 1200, 7, 12, 0, 0, 1200, 1201, 7, 2, 0, 0, 1201, 269, 1, 0, 0, 0, 1202, 1203, 3, 248, 116, 0, 1203, 1204, 1, 0, 0, 0, 1204, 1205, 6, 127, 26, 0, 1205, 271, 1, 0, 0, 0, 1206, 1207, 3, 66, 25, 0, 1207, 1208, 1, 0, 0, 0, 1208, 1209, 6, 128, 11, 0, 1209, 273, 1, 0, 0, 0, 1210, 1211, 3, 68, 26, 0, 1211, 1212, 1, 0, 0, 0, 1212, 1213, 6, 129, 11, 0, 1213, 275, 1, 0, 0, 0, 1214, 1215, 3, 70, 27, 0, 1215, 1216, 1, 0, 0, 0, 1216, 1217, 6, 130, 11, 0, 1217, 277, 1, 0, 0, 0, 1218, 1219, 3, 72, 28, 0, 1219, 1220, 1, 0, 0, 0, 1220, 1221, 6, 131, 16, 0, 1221, 1222, 6, 131, 12, 0, 1222, 279, 1, 0, 0, 0, 1223, 1224, 3, 180, 82, 0, 1224, 1225, 1, 0, 0, 0, 1225, 1226, 6, 132, 14, 0, 1226, 1227, 6, 132, 27, 0, 1227, 281, 1, 0, 0, 0, 1228, 1229, 7, 7, 0, 0, 1229, 1230, 7, 9, 0, 0, 1230, 1231, 1, 0, 0, 0, 1231, 1232, 6, 133, 28, 0, 1232, 283, 1, 0, 0, 0, 1233, 1234, 7, 19, 0, 0, 1234, 1235, 7, 1, 0, 0, 1235, 
1236, 7, 5, 0, 0, 1236, 1237, 7, 10, 0, 0, 1237, 1238, 1, 0, 0, 0, 1238, 1239, 6, 134, 28, 0, 1239, 285, 1, 0, 0, 0, 1240, 1241, 8, 35, 0, 0, 1241, 287, 1, 0, 0, 0, 1242, 1244, 3, 286, 135, 0, 1243, 1242, 1, 0, 0, 0, 1244, 1245, 1, 0, 0, 0, 1245, 1243, 1, 0, 0, 0, 1245, 1246, 1, 0, 0, 0, 1246, 1247, 1, 0, 0, 0, 1247, 1248, 3, 110, 47, 0, 1248, 1250, 1, 0, 0, 0, 1249, 1243, 1, 0, 0, 0, 1249, 1250, 1, 0, 0, 0, 1250, 1252, 1, 0, 0, 0, 1251, 1253, 3, 286, 135, 0, 1252, 1251, 1, 0, 0, 0, 1253, 1254, 1, 0, 0, 0, 1254, 1252, 1, 0, 0, 0, 1254, 1255, 1, 0, 0, 0, 1255, 289, 1, 0, 0, 0, 1256, 1257, 3, 288, 136, 0, 1257, 1258, 1, 0, 0, 0, 1258, 1259, 6, 137, 29, 0, 1259, 291, 1, 0, 0, 0, 1260, 1261, 3, 66, 25, 0, 1261, 1262, 1, 0, 0, 0, 1262, 1263, 6, 138, 11, 0, 1263, 293, 1, 0, 0, 0, 1264, 1265, 3, 68, 26, 0, 1265, 1266, 1, 0, 0, 0, 1266, 1267, 6, 139, 11, 0, 1267, 295, 1, 0, 0, 0, 1268, 1269, 3, 70, 27, 0, 1269, 1270, 1, 0, 0, 0, 1270, 1271, 6, 140, 11, 0, 1271, 297, 1, 0, 0, 0, 1272, 1273, 3, 72, 28, 0, 1273, 1274, 1, 0, 0, 0, 1274, 1275, 6, 141, 16, 0, 1275, 1276, 6, 141, 12, 0, 1276, 1277, 6, 141, 12, 0, 1277, 299, 1, 0, 0, 0, 1278, 1279, 3, 106, 45, 0, 1279, 1280, 1, 0, 0, 0, 1280, 1281, 6, 142, 20, 0, 1281, 301, 1, 0, 0, 0, 1282, 1283, 3, 112, 48, 0, 1283, 1284, 1, 0, 0, 0, 1284, 1285, 6, 143, 19, 0, 1285, 303, 1, 0, 0, 0, 1286, 1287, 3, 116, 50, 0, 1287, 1288, 1, 0, 0, 0, 1288, 1289, 6, 144, 23, 0, 1289, 305, 1, 0, 0, 0, 1290, 1291, 3, 284, 134, 0, 1291, 1292, 1, 0, 0, 0, 1292, 1293, 6, 145, 30, 0, 1293, 307, 1, 0, 0, 0, 1294, 1295, 3, 248, 116, 0, 1295, 1296, 1, 0, 0, 0, 1296, 1297, 6, 146, 26, 0, 1297, 309, 1, 0, 0, 0, 1298, 1299, 3, 188, 86, 0, 1299, 1300, 1, 0, 0, 0, 1300, 1301, 6, 147, 31, 0, 1301, 311, 1, 0, 0, 0, 1302, 1303, 4, 148, 14, 0, 1303, 1304, 3, 140, 62, 0, 1304, 1305, 1, 0, 0, 0, 1305, 1306, 6, 148, 24, 0, 1306, 313, 1, 0, 0, 0, 1307, 1308, 4, 149, 15, 0, 1308, 1309, 3, 178, 81, 0, 1309, 1310, 1, 0, 0, 0, 1310, 1311, 6, 149, 25, 0, 1311, 315, 1, 0, 0, 
0, 1312, 1313, 3, 66, 25, 0, 1313, 1314, 1, 0, 0, 0, 1314, 1315, 6, 150, 11, 0, 1315, 317, 1, 0, 0, 0, 1316, 1317, 3, 68, 26, 0, 1317, 1318, 1, 0, 0, 0, 1318, 1319, 6, 151, 11, 0, 1319, 319, 1, 0, 0, 0, 1320, 1321, 3, 70, 27, 0, 1321, 1322, 1, 0, 0, 0, 1322, 1323, 6, 152, 11, 0, 1323, 321, 1, 0, 0, 0, 1324, 1325, 3, 72, 28, 0, 1325, 1326, 1, 0, 0, 0, 1326, 1327, 6, 153, 16, 0, 1327, 1328, 6, 153, 12, 0, 1328, 323, 1, 0, 0, 0, 1329, 1330, 3, 116, 50, 0, 1330, 1331, 1, 0, 0, 0, 1331, 1332, 6, 154, 23, 0, 1332, 325, 1, 0, 0, 0, 1333, 1334, 4, 155, 16, 0, 1334, 1335, 3, 140, 62, 0, 1335, 1336, 1, 0, 0, 0, 1336, 1337, 6, 155, 24, 0, 1337, 327, 1, 0, 0, 0, 1338, 1339, 4, 156, 17, 0, 1339, 1340, 3, 178, 81, 0, 1340, 1341, 1, 0, 0, 0, 1341, 1342, 6, 156, 25, 0, 1342, 329, 1, 0, 0, 0, 1343, 1344, 3, 188, 86, 0, 1344, 1345, 1, 0, 0, 0, 1345, 1346, 6, 157, 31, 0, 1346, 331, 1, 0, 0, 0, 1347, 1348, 3, 184, 84, 0, 1348, 1349, 1, 0, 0, 0, 1349, 1350, 6, 158, 32, 0, 1350, 333, 1, 0, 0, 0, 1351, 1352, 3, 66, 25, 0, 1352, 1353, 1, 0, 0, 0, 1353, 1354, 6, 159, 11, 0, 1354, 335, 1, 0, 0, 0, 1355, 1356, 3, 68, 26, 0, 1356, 1357, 1, 0, 0, 0, 1357, 1358, 6, 160, 11, 0, 1358, 337, 1, 0, 0, 0, 1359, 1360, 3, 70, 27, 0, 1360, 1361, 1, 0, 0, 0, 1361, 1362, 6, 161, 11, 0, 1362, 339, 1, 0, 0, 0, 1363, 1364, 3, 72, 28, 0, 1364, 1365, 1, 0, 0, 0, 1365, 1366, 6, 162, 16, 0, 1366, 1367, 6, 162, 12, 0, 1367, 341, 1, 0, 0, 0, 1368, 1369, 7, 1, 0, 0, 1369, 1370, 7, 9, 0, 0, 1370, 1371, 7, 15, 0, 0, 1371, 1372, 7, 7, 0, 0, 1372, 343, 1, 0, 0, 0, 1373, 1374, 3, 66, 25, 0, 1374, 1375, 1, 0, 0, 0, 1375, 1376, 6, 164, 11, 0, 1376, 345, 1, 0, 0, 0, 1377, 1378, 3, 68, 26, 0, 1378, 1379, 1, 0, 0, 0, 1379, 1380, 6, 165, 11, 0, 1380, 347, 1, 0, 0, 0, 1381, 1382, 3, 70, 27, 0, 1382, 1383, 1, 0, 0, 0, 1383, 1384, 6, 166, 11, 0, 1384, 349, 1, 0, 0, 0, 1385, 1386, 3, 182, 83, 0, 1386, 1387, 1, 0, 0, 0, 1387, 1388, 6, 167, 17, 0, 1388, 1389, 6, 167, 12, 0, 1389, 351, 1, 0, 0, 0, 1390, 1391, 3, 110, 47, 0, 1391, 
1392, 1, 0, 0, 0, 1392, 1393, 6, 168, 18, 0, 1393, 353, 1, 0, 0, 0, 1394, 1400, 3, 84, 34, 0, 1395, 1400, 3, 74, 29, 0, 1396, 1400, 3, 116, 50, 0, 1397, 1400, 3, 76, 30, 0, 1398, 1400, 3, 90, 37, 0, 1399, 1394, 1, 0, 0, 0, 1399, 1395, 1, 0, 0, 0, 1399, 1396, 1, 0, 0, 0, 1399, 1397, 1, 0, 0, 0, 1399, 1398, 1, 0, 0, 0, 1400, 1401, 1, 0, 0, 0, 1401, 1399, 1, 0, 0, 0, 1401, 1402, 1, 0, 0, 0, 1402, 355, 1, 0, 0, 0, 1403, 1404, 3, 66, 25, 0, 1404, 1405, 1, 0, 0, 0, 1405, 1406, 6, 170, 11, 0, 1406, 357, 1, 0, 0, 0, 1407, 1408, 3, 68, 26, 0, 1408, 1409, 1, 0, 0, 0, 1409, 1410, 6, 171, 11, 0, 1410, 359, 1, 0, 0, 0, 1411, 1412, 3, 70, 27, 0, 1412, 1413, 1, 0, 0, 0, 1413, 1414, 6, 172, 11, 0, 1414, 361, 1, 0, 0, 0, 1415, 1416, 3, 72, 28, 0, 1416, 1417, 1, 0, 0, 0, 1417, 1418, 6, 173, 16, 0, 1418, 1419, 6, 173, 12, 0, 1419, 363, 1, 0, 0, 0, 1420, 1421, 3, 110, 47, 0, 1421, 1422, 1, 0, 0, 0, 1422, 1423, 6, 174, 18, 0, 1423, 365, 1, 0, 0, 0, 1424, 1425, 3, 112, 48, 0, 1425, 1426, 1, 0, 0, 0, 1426, 1427, 6, 175, 19, 0, 1427, 367, 1, 0, 0, 0, 1428, 1429, 3, 116, 50, 0, 1429, 1430, 1, 0, 0, 0, 1430, 1431, 6, 176, 23, 0, 1431, 369, 1, 0, 0, 0, 1432, 1433, 3, 282, 133, 0, 1433, 1434, 1, 0, 0, 0, 1434, 1435, 6, 177, 33, 0, 1435, 1436, 6, 177, 34, 0, 1436, 371, 1, 0, 0, 0, 1437, 1438, 3, 222, 103, 0, 1438, 1439, 1, 0, 0, 0, 1439, 1440, 6, 178, 21, 0, 1440, 373, 1, 0, 0, 0, 1441, 1442, 3, 94, 39, 0, 1442, 1443, 1, 0, 0, 0, 1443, 1444, 6, 179, 22, 0, 1444, 375, 1, 0, 0, 0, 1445, 1446, 3, 66, 25, 0, 1446, 1447, 1, 0, 0, 0, 1447, 1448, 6, 180, 11, 0, 1448, 377, 1, 0, 0, 0, 1449, 1450, 3, 68, 26, 0, 1450, 1451, 1, 0, 0, 0, 1451, 1452, 6, 181, 11, 0, 1452, 379, 1, 0, 0, 0, 1453, 1454, 3, 70, 27, 0, 1454, 1455, 1, 0, 0, 0, 1455, 1456, 6, 182, 11, 0, 1456, 381, 1, 0, 0, 0, 1457, 1458, 3, 72, 28, 0, 1458, 1459, 1, 0, 0, 0, 1459, 1460, 6, 183, 16, 0, 1460, 1461, 6, 183, 12, 0, 1461, 1462, 6, 183, 12, 0, 1462, 383, 1, 0, 0, 0, 1463, 1464, 3, 112, 48, 0, 1464, 1465, 1, 0, 0, 0, 1465, 1466, 6, 184, 
19, 0, 1466, 385, 1, 0, 0, 0, 1467, 1468, 3, 116, 50, 0, 1468, 1469, 1, 0, 0, 0, 1469, 1470, 6, 185, 23, 0, 1470, 387, 1, 0, 0, 0, 1471, 1472, 3, 248, 116, 0, 1472, 1473, 1, 0, 0, 0, 1473, 1474, 6, 186, 26, 0, 1474, 389, 1, 0, 0, 0, 1475, 1476, 3, 66, 25, 0, 1476, 1477, 1, 0, 0, 0, 1477, 1478, 6, 187, 11, 0, 1478, 391, 1, 0, 0, 0, 1479, 1480, 3, 68, 26, 0, 1480, 1481, 1, 0, 0, 0, 1481, 1482, 6, 188, 11, 0, 1482, 393, 1, 0, 0, 0, 1483, 1484, 3, 70, 27, 0, 1484, 1485, 1, 0, 0, 0, 1485, 1486, 6, 189, 11, 0, 1486, 395, 1, 0, 0, 0, 1487, 1488, 3, 72, 28, 0, 1488, 1489, 1, 0, 0, 0, 1489, 1490, 6, 190, 16, 0, 1490, 1491, 6, 190, 12, 0, 1491, 397, 1, 0, 0, 0, 1492, 1493, 3, 54, 19, 0, 1493, 1494, 1, 0, 0, 0, 1494, 1495, 6, 191, 35, 0, 1495, 399, 1, 0, 0, 0, 1496, 1497, 3, 268, 126, 0, 1497, 1498, 1, 0, 0, 0, 1498, 1499, 6, 192, 36, 0, 1499, 401, 1, 0, 0, 0, 1500, 1501, 3, 282, 133, 0, 1501, 1502, 1, 0, 0, 0, 1502, 1503, 6, 193, 33, 0, 1503, 1504, 6, 193, 12, 0, 1504, 1505, 6, 193, 0, 0, 1505, 403, 1, 0, 0, 0, 1506, 1507, 7, 20, 0, 0, 1507, 1508, 7, 2, 0, 0, 1508, 1509, 7, 1, 0, 0, 1509, 1510, 7, 9, 0, 0, 1510, 1511, 7, 17, 0, 0, 1511, 1512, 1, 0, 0, 0, 1512, 1513, 6, 194, 12, 0, 1513, 1514, 6, 194, 0, 0, 1514, 405, 1, 0, 0, 0, 1515, 1516, 3, 184, 84, 0, 1516, 1517, 1, 0, 0, 0, 1517, 1518, 6, 195, 32, 0, 1518, 407, 1, 0, 0, 0, 1519, 1520, 3, 188, 86, 0, 1520, 1521, 1, 0, 0, 0, 1521, 1522, 6, 196, 31, 0, 1522, 409, 1, 0, 0, 0, 1523, 1524, 3, 66, 25, 0, 1524, 1525, 1, 0, 0, 0, 1525, 1526, 6, 197, 11, 0, 1526, 411, 1, 0, 0, 0, 1527, 1528, 3, 68, 26, 0, 1528, 1529, 1, 0, 0, 0, 1529, 1530, 6, 198, 11, 0, 1530, 413, 1, 0, 0, 0, 1531, 1532, 3, 70, 27, 0, 1532, 1533, 1, 0, 0, 0, 1533, 1534, 6, 199, 11, 0, 1534, 415, 1, 0, 0, 0, 1535, 1536, 3, 72, 28, 0, 1536, 1537, 1, 0, 0, 0, 1537, 1538, 6, 200, 16, 0, 1538, 1539, 6, 200, 12, 0, 1539, 417, 1, 0, 0, 0, 1540, 1541, 3, 222, 103, 0, 1541, 1542, 1, 0, 0, 0, 1542, 1543, 6, 201, 21, 0, 1543, 1544, 6, 201, 12, 0, 1544, 1545, 6, 201, 37, 
0, 1545, 419, 1, 0, 0, 0, 1546, 1547, 3, 94, 39, 0, 1547, 1548, 1, 0, 0, 0, 1548, 1549, 6, 202, 22, 0, 1549, 1550, 6, 202, 12, 0, 1550, 1551, 6, 202, 37, 0, 1551, 421, 1, 0, 0, 0, 1552, 1553, 3, 66, 25, 0, 1553, 1554, 1, 0, 0, 0, 1554, 1555, 6, 203, 11, 0, 1555, 423, 1, 0, 0, 0, 1556, 1557, 3, 68, 26, 0, 1557, 1558, 1, 0, 0, 0, 1558, 1559, 6, 204, 11, 0, 1559, 425, 1, 0, 0, 0, 1560, 1561, 3, 70, 27, 0, 1561, 1562, 1, 0, 0, 0, 1562, 1563, 6, 205, 11, 0, 1563, 427, 1, 0, 0, 0, 1564, 1565, 3, 110, 47, 0, 1565, 1566, 1, 0, 0, 0, 1566, 1567, 6, 206, 18, 0, 1567, 1568, 6, 206, 12, 0, 1568, 1569, 6, 206, 9, 0, 1569, 429, 1, 0, 0, 0, 1570, 1571, 3, 112, 48, 0, 1571, 1572, 1, 0, 0, 0, 1572, 1573, 6, 207, 19, 0, 1573, 1574, 6, 207, 12, 0, 1574, 1575, 6, 207, 9, 0, 1575, 431, 1, 0, 0, 0, 1576, 1577, 3, 66, 25, 0, 1577, 1578, 1, 0, 0, 0, 1578, 1579, 6, 208, 11, 0, 1579, 433, 1, 0, 0, 0, 1580, 1581, 3, 68, 26, 0, 1581, 1582, 1, 0, 0, 0, 1582, 1583, 6, 209, 11, 0, 1583, 435, 1, 0, 0, 0, 1584, 1585, 3, 70, 27, 0, 1585, 1586, 1, 0, 0, 0, 1586, 1587, 6, 210, 11, 0, 1587, 437, 1, 0, 0, 0, 1588, 1589, 3, 188, 86, 0, 1589, 1590, 1, 0, 0, 0, 1590, 1591, 6, 211, 12, 0, 1591, 1592, 6, 211, 0, 0, 1592, 1593, 6, 211, 31, 0, 1593, 439, 1, 0, 0, 0, 1594, 1595, 3, 184, 84, 0, 1595, 1596, 1, 0, 0, 0, 1596, 1597, 6, 212, 12, 0, 1597, 1598, 6, 212, 0, 0, 1598, 1599, 6, 212, 32, 0, 1599, 441, 1, 0, 0, 0, 1600, 1601, 3, 100, 42, 0, 1601, 1602, 1, 0, 0, 0, 1602, 1603, 6, 213, 12, 0, 1603, 1604, 6, 213, 0, 0, 1604, 1605, 6, 213, 38, 0, 1605, 443, 1, 0, 0, 0, 1606, 1607, 3, 72, 28, 0, 1607, 1608, 1, 0, 0, 0, 1608, 1609, 6, 214, 16, 0, 1609, 1610, 6, 214, 12, 0, 1610, 445, 1, 0, 0, 0, 66, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 659, 669, 673, 676, 685, 687, 698, 717, 722, 731, 738, 743, 745, 756, 764, 767, 769, 774, 779, 785, 792, 797, 803, 806, 814, 818, 952, 957, 964, 966, 982, 987, 992, 994, 1000, 1077, 1082, 1131, 1135, 1140, 1145, 1150, 1152, 1156, 1158, 1245, 1249, 1254, 1399, 
1401, 39, 5, 1, 0, 5, 4, 0, 5, 6, 0, 5, 2, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 5, 11, 0, 5, 14, 0, 5, 13, 0, 0, 1, 0, 4, 0, 0, 7, 16, 0, 7, 72, 0, 5, 0, 0, 7, 29, 0, 7, 73, 0, 7, 38, 0, 7, 39, 0, 7, 36, 0, 7, 83, 0, 7, 30, 0, 7, 41, 0, 7, 53, 0, 7, 71, 0, 7, 87, 0, 5, 10, 0, 5, 7, 0, 7, 97, 0, 7, 96, 0, 7, 75, 0, 7, 74, 0, 7, 95, 0, 5, 12, 0, 7, 20, 0, 7, 91, 0, 5, 15, 0, 7, 33, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index f04582e820e28..737f0465e1ab6 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -35,23 +35,23 @@ public class EsqlBaseLexer extends LexerConfig { FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, - NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, - UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, - EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, - METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, - FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, - PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, - RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, - ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, - ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, - MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, - SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, 
SETTING=109, SETTING_LINE_COMMENT=110, - SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, - LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, - LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, - JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, - METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, - CLOSING_METRICS_WS=128; + LEFT_BRACES=69, RIGHT_BRACES=70, NAMED_OR_POSITIONAL_PARAM=71, OPENING_BRACKET=72, + CLOSING_BRACKET=73, UNQUOTED_IDENTIFIER=74, QUOTED_IDENTIFIER=75, EXPR_LINE_COMMENT=76, + EXPR_MULTILINE_COMMENT=77, EXPR_WS=78, EXPLAIN_WS=79, EXPLAIN_LINE_COMMENT=80, + EXPLAIN_MULTILINE_COMMENT=81, METADATA=82, UNQUOTED_SOURCE=83, FROM_LINE_COMMENT=84, + FROM_MULTILINE_COMMENT=85, FROM_WS=86, ID_PATTERN=87, PROJECT_LINE_COMMENT=88, + PROJECT_MULTILINE_COMMENT=89, PROJECT_WS=90, AS=91, RENAME_LINE_COMMENT=92, + RENAME_MULTILINE_COMMENT=93, RENAME_WS=94, ON=95, WITH=96, ENRICH_POLICY_NAME=97, + ENRICH_LINE_COMMENT=98, ENRICH_MULTILINE_COMMENT=99, ENRICH_WS=100, ENRICH_FIELD_LINE_COMMENT=101, + ENRICH_FIELD_MULTILINE_COMMENT=102, ENRICH_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, + MVEXPAND_MULTILINE_COMMENT=105, MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, + SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, + SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, + LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, + LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, + JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, + METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, CLOSING_METRICS_MULTILINE_COMMENT=129, + CLOSING_METRICS_WS=130; public static final int EXPRESSION_MODE=1, EXPLAIN_MODE=2, FROM_MODE=3, PROJECT_MODE=4, 
RENAME_MODE=5, ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9, SETTING_MODE=10, @@ -80,15 +80,15 @@ private static String[] makeRuleNames() { "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", "QUOTED_IDENTIFIER", - "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_OPENING_BRACKET", - "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", "FROM_COLON", - "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", "UNQUOTED_SOURCE", - "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", "PROJECT_PARAM", - "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", + "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NESTED_WHERE", "NAMED_OR_POSITIONAL_PARAM", + "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_ID", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", + "EXPLAIN_MULTILINE_COMMENT", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", + "FROM_COLON", "FROM_COMMA", "FROM_ASSIGN", "METADATA", "UNQUOTED_SOURCE_PART", + "UNQUOTED_SOURCE", "FROM_UNQUOTED_SOURCE", "FROM_QUOTED_SOURCE", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "PROJECT_PARAM", "PROJECT_NAMED_OR_POSITIONAL_PARAM", "UNQUOTED_ID_BODY_WITH_PATTERN", "UNQUOTED_ID_PATTERN", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", 
"PROJECT_WS", "RENAME_PIPE", "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "RENAME_PARAM", "RENAME_NAMED_OR_POSITIONAL_PARAM", "AS", "RENAME_ID_PATTERN", @@ -130,7 +130,7 @@ private static String[] makeLiteralNames() { "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, - "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, null, null, null, null, null, null, @@ -149,23 +149,23 @@ private static String[] makeSymbolicNames() { "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - 
"SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", - "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS" + "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", + "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", + "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", + "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", + "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", + "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", + "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -246,21 +246,25 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return DEV_JOIN_RIGHT_sempred((RuleContext)_localctx, predIndex); case 
23: return DEV_JOIN_LOOKUP_sempred((RuleContext)_localctx, predIndex); - case 110: + case 78: + return LEFT_BRACES_sempred((RuleContext)_localctx, predIndex); + case 79: + return RIGHT_BRACES_sempred((RuleContext)_localctx, predIndex); + case 112: return PROJECT_PARAM_sempred((RuleContext)_localctx, predIndex); - case 111: + case 113: return PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 122: + case 124: return RENAME_PARAM_sempred((RuleContext)_localctx, predIndex); - case 123: + case 125: return RENAME_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 146: + case 148: return ENRICH_FIELD_PARAM_sempred((RuleContext)_localctx, predIndex); - case 147: + case 149: return ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); - case 153: + case 155: return MVEXPAND_PARAM_sempred((RuleContext)_localctx, predIndex); - case 154: + case 156: return MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred((RuleContext)_localctx, predIndex); } return true; @@ -321,65 +325,79 @@ private boolean DEV_JOIN_LOOKUP_sempred(RuleContext _localctx, int predIndex) { } return true; } - private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean LEFT_BRACES_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 8: return this.isDevVersion(); } return true; } - private boolean PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RIGHT_BRACES_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 9: return this.isDevVersion(); } return true; } - private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean PROJECT_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 10: return this.isDevVersion(); } return true; } - private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean 
PROJECT_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 11: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 12: return this.isDevVersion(); } return true; } - private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean RENAME_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 13: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean ENRICH_FIELD_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 14: return this.isDevVersion(); } return true; } - private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + private boolean ENRICH_FIELD_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { switch (predIndex) { case 15: return this.isDevVersion(); } return true; } + private boolean MVEXPAND_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 16: + return this.isDevVersion(); + } + return true; + } + private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx, int predIndex) { + switch (predIndex) { + case 17: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0000\u0080\u0641\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ + "\u0004\u0000\u0082\u064b\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ "\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff"+ @@ -441,194 
+459,196 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u00cb\u0002\u00cc\u0007\u00cc\u0002\u00cd\u0007\u00cd\u0002\u00ce\u0007"+ "\u00ce\u0002\u00cf\u0007\u00cf\u0002\u00d0\u0007\u00d0\u0002\u00d1\u0007"+ "\u00d1\u0002\u00d2\u0007\u00d2\u0002\u00d3\u0007\u00d3\u0002\u00d4\u0007"+ - "\u00d4\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ - "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u00d4\u0002\u00d5\u0007\u00d5\u0002\u00d6\u0007\u00d6\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001"+ "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - "\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001"+ + "\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001"+ - "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b"+ - "\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001"+ - "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - 
"\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ - "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b"+ + "\u0001\b\u0001\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ + "\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ "\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001"+ - "\u000f\u0001\u000f\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001"+ - "\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001"+ + "\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001"+ - 
"\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0012\u0001\u0012\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001"+ "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ - "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ - "\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004"+ - "\u0018\u028e\b\u0018\u000b\u0018\f\u0018\u028f\u0001\u0018\u0001\u0018"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0298\b\u0019"+ - "\n\u0019\f\u0019\u029b\t\u0019\u0001\u0019\u0003\u0019\u029e\b\u0019\u0001"+ - "\u0019\u0003\u0019\u02a1\b\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0005\u001a\u02aa\b\u001a\n"+ - "\u001a\f\u001a\u02ad\t\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ - "\u001a\u0001\u001a\u0001\u001b\u0004\u001b\u02b5\b\u001b\u000b\u001b\f"+ - "\u001b\u02b6\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0001!\u0001!\u0003!\u02ca\b!\u0001"+ - "!\u0004!\u02cd\b!\u000b!\f!\u02ce\u0001\"\u0001\"\u0001#\u0001#\u0001"+ - "$\u0001$\u0001$\u0003$\u02d8\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003"+ - "&\u02df\b&\u0001\'\u0001\'\u0001\'\u0005\'\u02e4\b\'\n\'\f\'\u02e7\t\'"+ - "\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0005\'\u02ef\b\'\n\'"+ - "\f\'\u02f2\t\'\u0001\'\u0001\'\u0001\'\u0001\'\u0001\'\u0003\'\u02f9\b"+ - "\'\u0001\'\u0003\'\u02fc\b\'\u0003\'\u02fe\b\'\u0001(\u0004(\u0301\b("+ - 
"\u000b(\f(\u0302\u0001)\u0004)\u0306\b)\u000b)\f)\u0307\u0001)\u0001)"+ - "\u0005)\u030c\b)\n)\f)\u030f\t)\u0001)\u0001)\u0004)\u0313\b)\u000b)\f"+ - ")\u0314\u0001)\u0004)\u0318\b)\u000b)\f)\u0319\u0001)\u0001)\u0005)\u031e"+ - "\b)\n)\f)\u0321\t)\u0003)\u0323\b)\u0001)\u0001)\u0001)\u0001)\u0004)"+ - "\u0329\b)\u000b)\f)\u032a\u0001)\u0001)\u0003)\u032f\b)\u0001*\u0001*"+ - "\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001"+ - "-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001/\u00010\u00010\u00011\u0001"+ - "1\u00011\u00011\u00011\u00012\u00012\u00013\u00013\u00013\u00013\u0001"+ - "3\u00013\u00014\u00014\u00014\u00014\u00014\u00014\u00015\u00015\u0001"+ - "5\u00016\u00016\u00016\u00017\u00017\u00017\u00017\u00017\u00018\u0001"+ - "8\u00018\u00018\u00018\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ - ";\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001<\u0001<\u0001"+ - "<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001?\u0001"+ - "?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001A\u0001A\u0001A\u0001B\u0001"+ - "B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001E\u0001"+ - "F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001H\u0001I\u0001I\u0001"+ - "J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001M\u0001M\u0001N\u0001N\u0001"+ - "N\u0001N\u0001O\u0001O\u0001O\u0003O\u03af\bO\u0001O\u0005O\u03b2\bO\n"+ - "O\fO\u03b5\tO\u0001O\u0001O\u0004O\u03b9\bO\u000bO\fO\u03ba\u0003O\u03bd"+ - "\bO\u0001P\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001Q\u0001Q\u0001"+ - "Q\u0001R\u0001R\u0005R\u03cb\bR\nR\fR\u03ce\tR\u0001R\u0001R\u0003R\u03d2"+ - "\bR\u0001R\u0004R\u03d5\bR\u000bR\fR\u03d6\u0003R\u03d9\bR\u0001S\u0001"+ - "S\u0004S\u03dd\bS\u000bS\fS\u03de\u0001S\u0001S\u0001T\u0001T\u0001U\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001W\u0001W\u0001W\u0001"+ - "W\u0001X\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001Y\u0001"+ - "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001\\\u0001"+ - 
"\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001]\u0001^\u0001^\u0001"+ - "^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001`\u0001`\u0001"+ - "a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001c\u0001c\u0001"+ - "c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0003"+ - "d\u042c\bd\u0001e\u0004e\u042f\be\u000be\fe\u0430\u0001f\u0001f\u0001"+ - "f\u0001f\u0001g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001"+ - "i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001k\u0001k\u0001"+ - "k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001"+ - "m\u0001n\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001o\u0001"+ - "o\u0001p\u0001p\u0001p\u0001p\u0003p\u0462\bp\u0001q\u0001q\u0003q\u0466"+ - "\bq\u0001q\u0005q\u0469\bq\nq\fq\u046c\tq\u0001q\u0001q\u0003q\u0470\b"+ - "q\u0001q\u0004q\u0473\bq\u000bq\fq\u0474\u0003q\u0477\bq\u0001r\u0001"+ - "r\u0004r\u047b\br\u000br\fr\u047c\u0001s\u0001s\u0001s\u0001s\u0001t\u0001"+ - "t\u0001t\u0001t\u0001u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001"+ - "v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001"+ - "y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001z\u0001{\u0001"+ - "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001"+ - "}\u0001~\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001"+ - "\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001"+ + "\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001\u0015\u0001"+ + "\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ + "\u0017\u0001\u0017\u0001\u0017\u0001\u0018\u0004\u0018\u0292\b\u0018\u000b"+ + "\u0018\f\u0018\u0293\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001"+ + 
"\u0019\u0001\u0019\u0005\u0019\u029c\b\u0019\n\u0019\f\u0019\u029f\t\u0019"+ + "\u0001\u0019\u0003\u0019\u02a2\b\u0019\u0001\u0019\u0003\u0019\u02a5\b"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001"+ + "\u001a\u0001\u001a\u0005\u001a\u02ae\b\u001a\n\u001a\f\u001a\u02b1\t\u001a"+ + "\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ + "\u0004\u001b\u02b9\b\u001b\u000b\u001b\f\u001b\u02ba\u0001\u001b\u0001"+ + "\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001d\u0001"+ + "\u001d\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0001"+ + " \u0001 \u0001!\u0001!\u0003!\u02ce\b!\u0001!\u0004!\u02d1\b!\u000b!\f"+ + "!\u02d2\u0001\"\u0001\"\u0001#\u0001#\u0001$\u0001$\u0001$\u0003$\u02dc"+ + "\b$\u0001%\u0001%\u0001&\u0001&\u0001&\u0003&\u02e3\b&\u0001\'\u0001\'"+ + "\u0001\'\u0005\'\u02e8\b\'\n\'\f\'\u02eb\t\'\u0001\'\u0001\'\u0001\'\u0001"+ + "\'\u0001\'\u0001\'\u0005\'\u02f3\b\'\n\'\f\'\u02f6\t\'\u0001\'\u0001\'"+ + "\u0001\'\u0001\'\u0001\'\u0003\'\u02fd\b\'\u0001\'\u0003\'\u0300\b\'\u0003"+ + "\'\u0302\b\'\u0001(\u0004(\u0305\b(\u000b(\f(\u0306\u0001)\u0004)\u030a"+ + "\b)\u000b)\f)\u030b\u0001)\u0001)\u0005)\u0310\b)\n)\f)\u0313\t)\u0001"+ + ")\u0001)\u0004)\u0317\b)\u000b)\f)\u0318\u0001)\u0004)\u031c\b)\u000b"+ + ")\f)\u031d\u0001)\u0001)\u0005)\u0322\b)\n)\f)\u0325\t)\u0003)\u0327\b"+ + ")\u0001)\u0001)\u0001)\u0001)\u0004)\u032d\b)\u000b)\f)\u032e\u0001)\u0001"+ + ")\u0003)\u0333\b)\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0001-\u0001-\u0001.\u0001.\u0001.\u0001/\u0001"+ + "/\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ + "3\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u00014\u0001"+ + "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00017\u00017\u0001"+ + "7\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u00019\u0001"+ + 
":\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001"+ + "<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001>\u0001>\u0001"+ + "?\u0001?\u0001?\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001A\u0001"+ + "A\u0001A\u0001A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001C\u0001D\u0001"+ + "D\u0001D\u0001E\u0001E\u0001F\u0001F\u0001F\u0001G\u0001G\u0001H\u0001"+ + "H\u0001H\u0001I\u0001I\u0001J\u0001J\u0001K\u0001K\u0001L\u0001L\u0001"+ + "M\u0001M\u0001N\u0001N\u0001N\u0001O\u0001O\u0001O\u0001P\u0001P\u0001"+ + "P\u0001P\u0001Q\u0001Q\u0001Q\u0003Q\u03b9\bQ\u0001Q\u0005Q\u03bc\bQ\n"+ + "Q\fQ\u03bf\tQ\u0001Q\u0001Q\u0004Q\u03c3\bQ\u000bQ\fQ\u03c4\u0003Q\u03c7"+ + "\bQ\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001S\u0001S\u0001"+ + "S\u0001T\u0001T\u0005T\u03d5\bT\nT\fT\u03d8\tT\u0001T\u0001T\u0003T\u03dc"+ + "\bT\u0001T\u0004T\u03df\bT\u000bT\fT\u03e0\u0003T\u03e3\bT\u0001U\u0001"+ + "U\u0004U\u03e7\bU\u000bU\fU\u03e8\u0001U\u0001U\u0001V\u0001V\u0001W\u0001"+ + "W\u0001W\u0001W\u0001X\u0001X\u0001X\u0001X\u0001Y\u0001Y\u0001Y\u0001"+ + "Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001[\u0001[\u0001[\u0001"+ + "[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001]\u0001]\u0001^\u0001"+ + "^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001_\u0001_\u0001`\u0001`\u0001"+ + "`\u0001`\u0001a\u0001a\u0001a\u0001a\u0001b\u0001b\u0001b\u0001b\u0001"+ + "c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001d\u0001e\u0001e\u0001"+ + "e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0003"+ + "f\u0436\bf\u0001g\u0004g\u0439\bg\u000bg\fg\u043a\u0001h\u0001h\u0001"+ + "h\u0001h\u0001i\u0001i\u0001i\u0001i\u0001j\u0001j\u0001j\u0001j\u0001"+ + "k\u0001k\u0001k\u0001k\u0001l\u0001l\u0001l\u0001l\u0001m\u0001m\u0001"+ + "m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001n\u0001o\u0001o\u0001o\u0001"+ + "o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001q\u0001q\u0001q\u0001q\u0001"+ + "q\u0001r\u0001r\u0001r\u0001r\u0003r\u046c\br\u0001s\u0001s\u0003s\u0470"+ + 
"\bs\u0001s\u0005s\u0473\bs\ns\fs\u0476\ts\u0001s\u0001s\u0003s\u047a\b"+ + "s\u0001s\u0004s\u047d\bs\u000bs\fs\u047e\u0003s\u0481\bs\u0001t\u0001"+ + "t\u0004t\u0485\bt\u000bt\ft\u0486\u0001u\u0001u\u0001u\u0001u\u0001v\u0001"+ + "v\u0001v\u0001v\u0001w\u0001w\u0001w\u0001w\u0001x\u0001x\u0001x\u0001"+ + "x\u0001x\u0001y\u0001y\u0001y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001"+ + "{\u0001{\u0001{\u0001{\u0001|\u0001|\u0001|\u0001|\u0001|\u0001}\u0001"+ + "}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001~\u0001\u007f\u0001\u007f\u0001"+ + "\u007f\u0001\u007f\u0001\u0080\u0001\u0080\u0001\u0080\u0001\u0080\u0001"+ "\u0081\u0001\u0081\u0001\u0081\u0001\u0081\u0001\u0082\u0001\u0082\u0001"+ - "\u0082\u0001\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ - "\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ - "\u0084\u0001\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0086\u0004"+ - "\u0086\u04d2\b\u0086\u000b\u0086\f\u0086\u04d3\u0001\u0086\u0001\u0086"+ - "\u0003\u0086\u04d8\b\u0086\u0001\u0086\u0004\u0086\u04db\b\u0086\u000b"+ - "\u0086\f\u0086\u04dc\u0001\u0087\u0001\u0087\u0001\u0087\u0001\u0087\u0001"+ - "\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0089\u0001\u0089\u0001"+ - "\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ - "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ - "\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001"+ - "\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ - "\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001\u0090\u0001"+ - "\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ - "\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ - "\u0093\u0001\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001"+ + "\u0082\u0001\u0082\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001"+ + 
"\u0083\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001\u0084\u0001"+ + "\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0086\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0004\u0088\u04dc\b\u0088\u000b\u0088\f"+ + "\u0088\u04dd\u0001\u0088\u0001\u0088\u0003\u0088\u04e2\b\u0088\u0001\u0088"+ + "\u0004\u0088\u04e5\b\u0088\u000b\u0088\f\u0088\u04e6\u0001\u0089\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001\u008a\u0001\u008a\u0001"+ + "\u008a\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008c\u0001"+ + "\u008c\u0001\u008c\u0001\u008c\u0001\u008d\u0001\u008d\u0001\u008d\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u008f\u0001\u0090\u0001"+ + "\u0090\u0001\u0090\u0001\u0090\u0001\u0091\u0001\u0091\u0001\u0091\u0001"+ + "\u0091\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0092\u0001\u0093\u0001"+ + "\u0093\u0001\u0093\u0001\u0093\u0001\u0094\u0001\u0094\u0001\u0094\u0001"+ "\u0094\u0001\u0094\u0001\u0095\u0001\u0095\u0001\u0095\u0001\u0095\u0001"+ - "\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001\u0097\u0001"+ + "\u0095\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0096\u0001\u0097\u0001"+ "\u0097\u0001\u0097\u0001\u0097\u0001\u0098\u0001\u0098\u0001\u0098\u0001"+ "\u0098\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001\u0099\u0001"+ - "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001"+ + "\u009a\u0001\u009a\u0001\u009a\u0001\u009a\u0001\u009b\u0001\u009b\u0001"+ "\u009b\u0001\u009b\u0001\u009b\u0001\u009c\u0001\u009c\u0001\u009c\u0001"+ - "\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009e\u0001"+ - "\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001\u009f\u0001"+ - "\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ - 
"\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001"+ + "\u009c\u0001\u009c\u0001\u009d\u0001\u009d\u0001\u009d\u0001\u009d\u0001"+ + "\u009e\u0001\u009e\u0001\u009e\u0001\u009e\u0001\u009f\u0001\u009f\u0001"+ + "\u009f\u0001\u009f\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001\u00a0\u0001"+ + "\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a1\u0001\u00a2\u0001\u00a2\u0001"+ "\u00a2\u0001\u00a2\u0001\u00a2\u0001\u00a3\u0001\u00a3\u0001\u00a3\u0001"+ - "\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a5\u0001"+ + "\u00a3\u0001\u00a3\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001\u00a4\u0001"+ "\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a5\u0001\u00a6\u0001\u00a6\u0001"+ "\u00a6\u0001\u00a6\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001\u00a7\u0001"+ - "\u00a7\u0004\u00a7\u056e\b\u00a7\u000b\u00a7\f\u00a7\u056f\u0001\u00a8"+ - "\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001\u00a9\u0001\u00a9"+ - "\u0001\u00a9\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00ab"+ - "\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac"+ - "\u0001\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad"+ - "\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001\u00af"+ - "\u0001\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0"+ - "\u0001\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b2"+ - "\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001\u00b3"+ - "\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b5"+ - "\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6"+ - "\u0001\u00b6\u0001\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7"+ - "\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b9"+ - "\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001\u00ba"+ - "\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bc"+ - 
"\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd"+ - "\u0001\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be"+ - "\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf"+ - "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c0"+ - "\u0001\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1"+ - "\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c3"+ - "\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001\u00c4"+ - "\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c6"+ - "\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7"+ - "\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8"+ - "\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c9\u0001\u00c9"+ - "\u0001\u00c9\u0001\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca"+ - "\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001\u00cc"+ - "\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd"+ - "\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001\u00ce\u0001\u00ce"+ - "\u0001\u00ce\u0001\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf"+ - "\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001\u00d1"+ - "\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2"+ - "\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001\u00d3\u0001\u00d3"+ - "\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d4\u0001\u00d4"+ - "\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0002\u02ab\u02f0\u0000\u00d5\u0010"+ - "\u0001\u0012\u0002\u0014\u0003\u0016\u0004\u0018\u0005\u001a\u0006\u001c"+ - "\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e,\u000f.\u00100\u00112\u0012"+ - "4\u00136\u00148\u0015:\u0016<\u0017>\u0018@\u0019B\u001aD\u001bF\u001c"+ - "H\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u0000X\u0000Z\u0000"+ - "\\\u0000^\u001e`\u001fb 
d!f\"h#j$l%n&p\'r(t)v*x+z,|-~.\u0080/\u00820\u0084"+ - "1\u00862\u00883\u008a4\u008c5\u008e6\u00907\u00928\u00949\u0096:\u0098"+ - ";\u009a<\u009c=\u009e>\u00a0?\u00a2@\u00a4A\u00a6B\u00a8C\u00aaD\u00ac"+ - "\u0000\u00aeE\u00b0F\u00b2G\u00b4H\u00b6\u0000\u00b8I\u00baJ\u00bcK\u00be"+ - "L\u00c0\u0000\u00c2\u0000\u00c4M\u00c6N\u00c8O\u00ca\u0000\u00cc\u0000"+ - "\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4\u0000\u00d6P\u00d8\u0000\u00da"+ - "Q\u00dc\u0000\u00de\u0000\u00e0R\u00e2S\u00e4T\u00e6\u0000\u00e8\u0000"+ - "\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0\u0000\u00f2\u0000\u00f4U\u00f6"+ - "V\u00f8W\u00faX\u00fc\u0000\u00fe\u0000\u0100\u0000\u0102\u0000\u0104"+ - "\u0000\u0106\u0000\u0108Y\u010a\u0000\u010cZ\u010e[\u0110\\\u0112\u0000"+ - "\u0114\u0000\u0116]\u0118^\u011a\u0000\u011c_\u011e\u0000\u0120`\u0122"+ - "a\u0124b\u0126\u0000\u0128\u0000\u012a\u0000\u012c\u0000\u012e\u0000\u0130"+ - "\u0000\u0132\u0000\u0134\u0000\u0136\u0000\u0138c\u013ad\u013ce\u013e"+ - "\u0000\u0140\u0000\u0142\u0000\u0144\u0000\u0146\u0000\u0148\u0000\u014a"+ - "f\u014cg\u014eh\u0150\u0000\u0152i\u0154j\u0156k\u0158l\u015a\u0000\u015c"+ - "\u0000\u015em\u0160n\u0162o\u0164p\u0166\u0000\u0168\u0000\u016a\u0000"+ - "\u016c\u0000\u016e\u0000\u0170\u0000\u0172\u0000\u0174q\u0176r\u0178s"+ - "\u017a\u0000\u017c\u0000\u017e\u0000\u0180\u0000\u0182t\u0184u\u0186v"+ - "\u0188\u0000\u018a\u0000\u018c\u0000\u018e\u0000\u0190w\u0192\u0000\u0194"+ - "\u0000\u0196x\u0198y\u019az\u019c\u0000\u019e\u0000\u01a0\u0000\u01a2"+ - "{\u01a4|\u01a6}\u01a8\u0000\u01aa\u0000\u01ac~\u01ae\u007f\u01b0\u0080"+ - "\u01b2\u0000\u01b4\u0000\u01b6\u0000\u01b8\u0000\u0010\u0000\u0001\u0002"+ - "\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000"+ - "DDdd\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ + "\u00a7\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a8\u0001\u00a9\u0001"+ + "\u00a9\u0001\u00a9\u0001\u00a9\u0001\u00a9\u0004\u00a9\u0578\b\u00a9\u000b"+ + 
"\u00a9\f\u00a9\u0579\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001\u00aa\u0001"+ + "\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ab\u0001\u00ac\u0001\u00ac\u0001"+ + "\u00ac\u0001\u00ac\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001\u00ad\u0001"+ + "\u00ad\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00ae\u0001\u00af\u0001"+ + "\u00af\u0001\u00af\u0001\u00af\u0001\u00b0\u0001\u00b0\u0001\u00b0\u0001"+ + "\u00b0\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001\u00b1\u0001"+ + "\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b2\u0001\u00b3\u0001\u00b3\u0001"+ + "\u00b3\u0001\u00b3\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001\u00b4\u0001"+ + "\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b5\u0001\u00b6\u0001\u00b6\u0001"+ + "\u00b6\u0001\u00b6\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001\u00b7\u0001"+ + "\u00b7\u0001\u00b7\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001\u00b8\u0001"+ + "\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00b9\u0001\u00ba\u0001\u00ba\u0001"+ + "\u00ba\u0001\u00ba\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001\u00bb\u0001"+ + "\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bc\u0001\u00bd\u0001\u00bd\u0001"+ + "\u00bd\u0001\u00bd\u0001\u00be\u0001\u00be\u0001\u00be\u0001\u00be\u0001"+ + "\u00be\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00bf\u0001\u00c0\u0001"+ + "\u00c0\u0001\u00c0\u0001\u00c0\u0001\u00c1\u0001\u00c1\u0001\u00c1\u0001"+ + "\u00c1\u0001\u00c1\u0001\u00c1\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ + "\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001\u00c2\u0001"+ + "\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c3\u0001\u00c4\u0001\u00c4\u0001"+ + "\u00c4\u0001\u00c4\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001\u00c5\u0001"+ + "\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c6\u0001\u00c7\u0001\u00c7\u0001"+ + "\u00c7\u0001\u00c7\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001\u00c8\u0001"+ + "\u00c8\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001\u00c9\u0001"+ + "\u00c9\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001\u00ca\u0001"+ + 
"\u00ca\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cb\u0001\u00cc\u0001"+ + "\u00cc\u0001\u00cc\u0001\u00cc\u0001\u00cd\u0001\u00cd\u0001\u00cd\u0001"+ + "\u00cd\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001\u00ce\u0001"+ + "\u00ce\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001\u00cf\u0001"+ + "\u00cf\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d0\u0001\u00d1\u0001"+ + "\u00d1\u0001\u00d1\u0001\u00d1\u0001\u00d2\u0001\u00d2\u0001\u00d2\u0001"+ + "\u00d2\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001\u00d3\u0001"+ + "\u00d3\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001\u00d4\u0001"+ + "\u00d4\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001\u00d5\u0001"+ + "\u00d5\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0001\u00d6\u0002"+ + "\u02af\u02f4\u0000\u00d7\u0010\u0001\u0012\u0002\u0014\u0003\u0016\u0004"+ + "\u0018\u0005\u001a\u0006\u001c\u0007\u001e\b \t\"\n$\u000b&\f(\r*\u000e"+ + ",\u000f.\u00100\u00112\u00124\u00136\u00148\u0015:\u0016<\u0017>\u0018"+ + "@\u0019B\u001aD\u001bF\u001cH\u001dJ\u0000L\u0000N\u0000P\u0000R\u0000"+ + "T\u0000V\u0000X\u0000Z\u0000\\\u0000^\u001e`\u001fb d!f\"h#j$l%n&p\'r"+ + "(t)v*x+z,|-~.\u0080/\u00820\u00841\u00862\u00883\u008a4\u008c5\u008e6"+ + "\u00907\u00928\u00949\u0096:\u0098;\u009a<\u009c=\u009e>\u00a0?\u00a2"+ + "@\u00a4A\u00a6B\u00a8C\u00aaD\u00acE\u00aeF\u00b0\u0000\u00b2G\u00b4H"+ + "\u00b6I\u00b8J\u00ba\u0000\u00bcK\u00beL\u00c0M\u00c2N\u00c4\u0000\u00c6"+ + "\u0000\u00c8O\u00caP\u00ccQ\u00ce\u0000\u00d0\u0000\u00d2\u0000\u00d4"+ + "\u0000\u00d6\u0000\u00d8\u0000\u00daR\u00dc\u0000\u00deS\u00e0\u0000\u00e2"+ + "\u0000\u00e4T\u00e6U\u00e8V\u00ea\u0000\u00ec\u0000\u00ee\u0000\u00f0"+ + "\u0000\u00f2\u0000\u00f4\u0000\u00f6\u0000\u00f8W\u00faX\u00fcY\u00fe"+ + "Z\u0100\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108\u0000\u010a\u0000"+ + "\u010c[\u010e\u0000\u0110\\\u0112]\u0114^\u0116\u0000\u0118\u0000\u011a"+ + 
"_\u011c`\u011e\u0000\u0120a\u0122\u0000\u0124b\u0126c\u0128d\u012a\u0000"+ + "\u012c\u0000\u012e\u0000\u0130\u0000\u0132\u0000\u0134\u0000\u0136\u0000"+ + "\u0138\u0000\u013a\u0000\u013ce\u013ef\u0140g\u0142\u0000\u0144\u0000"+ + "\u0146\u0000\u0148\u0000\u014a\u0000\u014c\u0000\u014eh\u0150i\u0152j"+ + "\u0154\u0000\u0156k\u0158l\u015am\u015cn\u015e\u0000\u0160\u0000\u0162"+ + "o\u0164p\u0166q\u0168r\u016a\u0000\u016c\u0000\u016e\u0000\u0170\u0000"+ + "\u0172\u0000\u0174\u0000\u0176\u0000\u0178s\u017at\u017cu\u017e\u0000"+ + "\u0180\u0000\u0182\u0000\u0184\u0000\u0186v\u0188w\u018ax\u018c\u0000"+ + "\u018e\u0000\u0190\u0000\u0192\u0000\u0194y\u0196\u0000\u0198\u0000\u019a"+ + "z\u019c{\u019e|\u01a0\u0000\u01a2\u0000\u01a4\u0000\u01a6}\u01a8~\u01aa"+ + "\u007f\u01ac\u0000\u01ae\u0000\u01b0\u0080\u01b2\u0081\u01b4\u0082\u01b6"+ + "\u0000\u01b8\u0000\u01ba\u0000\u01bc\u0000\u0010\u0000\u0001\u0002\u0003"+ + "\u0004\u0005\u0006\u0007\b\t\n\u000b\f\r\u000e\u000f$\u0002\u0000DDdd"+ + "\u0002\u0000IIii\u0002\u0000SSss\u0002\u0000EEee\u0002\u0000CCcc\u0002"+ "\u0000TTtt\u0002\u0000RRrr\u0002\u0000OOoo\u0002\u0000PPpp\u0002\u0000"+ "NNnn\u0002\u0000HHhh\u0002\u0000VVvv\u0002\u0000AAaa\u0002\u0000LLll\u0002"+ "\u0000XXxx\u0002\u0000FFff\u0002\u0000MMmm\u0002\u0000GGgg\u0002\u0000"+ @@ -637,7 +657,7 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0000AZaz\b\u0000\"\"NNRRTT\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002"+ "\u0000++--\u0001\u0000``\u0002\u0000BBbb\u0002\u0000YYyy\u000b\u0000\t"+ "\n\r\r \"\",,//::==[[]]||\u0002\u0000**//\u000b\u0000\t\n\r\r \"#,,"+ - "//::<<>?\\\\||\u065c\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ + "//::<<>?\\\\||\u0666\u0000\u0010\u0001\u0000\u0000\u0000\u0000\u0012\u0001"+ "\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000\u0016\u0001"+ "\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000\u001a\u0001"+ 
"\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000\u001e\u0001"+ @@ -671,757 +691,762 @@ private boolean MVEXPAND_NAMED_OR_POSITIONAL_PARAM_sempred(RuleContext _localctx "\u0000\u0001\u00aa\u0001\u0000\u0000\u0000\u0001\u00ac\u0001\u0000\u0000"+ "\u0000\u0001\u00ae\u0001\u0000\u0000\u0000\u0001\u00b0\u0001\u0000\u0000"+ "\u0000\u0001\u00b2\u0001\u0000\u0000\u0000\u0001\u00b4\u0001\u0000\u0000"+ - "\u0000\u0001\u00b8\u0001\u0000\u0000\u0000\u0001\u00ba\u0001\u0000\u0000"+ + "\u0000\u0001\u00b6\u0001\u0000\u0000\u0000\u0001\u00b8\u0001\u0000\u0000"+ "\u0000\u0001\u00bc\u0001\u0000\u0000\u0000\u0001\u00be\u0001\u0000\u0000"+ - "\u0000\u0002\u00c0\u0001\u0000\u0000\u0000\u0002\u00c2\u0001\u0000\u0000"+ + "\u0000\u0001\u00c0\u0001\u0000\u0000\u0000\u0001\u00c2\u0001\u0000\u0000"+ "\u0000\u0002\u00c4\u0001\u0000\u0000\u0000\u0002\u00c6\u0001\u0000\u0000"+ - "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0003\u00ca\u0001\u0000\u0000"+ - "\u0000\u0003\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ + "\u0000\u0002\u00c8\u0001\u0000\u0000\u0000\u0002\u00ca\u0001\u0000\u0000"+ + "\u0000\u0002\u00cc\u0001\u0000\u0000\u0000\u0003\u00ce\u0001\u0000\u0000"+ "\u0000\u0003\u00d0\u0001\u0000\u0000\u0000\u0003\u00d2\u0001\u0000\u0000"+ "\u0000\u0003\u00d4\u0001\u0000\u0000\u0000\u0003\u00d6\u0001\u0000\u0000"+ - "\u0000\u0003\u00da\u0001\u0000\u0000\u0000\u0003\u00dc\u0001\u0000\u0000"+ + "\u0000\u0003\u00d8\u0001\u0000\u0000\u0000\u0003\u00da\u0001\u0000\u0000"+ "\u0000\u0003\u00de\u0001\u0000\u0000\u0000\u0003\u00e0\u0001\u0000\u0000"+ "\u0000\u0003\u00e2\u0001\u0000\u0000\u0000\u0003\u00e4\u0001\u0000\u0000"+ - "\u0000\u0004\u00e6\u0001\u0000\u0000\u0000\u0004\u00e8\u0001\u0000\u0000"+ + "\u0000\u0003\u00e6\u0001\u0000\u0000\u0000\u0003\u00e8\u0001\u0000\u0000"+ "\u0000\u0004\u00ea\u0001\u0000\u0000\u0000\u0004\u00ec\u0001\u0000\u0000"+ - "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f4\u0001\u0000\u0000"+ - 
"\u0000\u0004\u00f6\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ - "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0005\u00fc\u0001\u0000\u0000"+ - "\u0000\u0005\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ + "\u0000\u0004\u00ee\u0001\u0000\u0000\u0000\u0004\u00f0\u0001\u0000\u0000"+ + "\u0000\u0004\u00f2\u0001\u0000\u0000\u0000\u0004\u00f8\u0001\u0000\u0000"+ + "\u0000\u0004\u00fa\u0001\u0000\u0000\u0000\u0004\u00fc\u0001\u0000\u0000"+ + "\u0000\u0004\u00fe\u0001\u0000\u0000\u0000\u0005\u0100\u0001\u0000\u0000"+ "\u0000\u0005\u0102\u0001\u0000\u0000\u0000\u0005\u0104\u0001\u0000\u0000"+ "\u0000\u0005\u0106\u0001\u0000\u0000\u0000\u0005\u0108\u0001\u0000\u0000"+ "\u0000\u0005\u010a\u0001\u0000\u0000\u0000\u0005\u010c\u0001\u0000\u0000"+ "\u0000\u0005\u010e\u0001\u0000\u0000\u0000\u0005\u0110\u0001\u0000\u0000"+ - "\u0000\u0006\u0112\u0001\u0000\u0000\u0000\u0006\u0114\u0001\u0000\u0000"+ + "\u0000\u0005\u0112\u0001\u0000\u0000\u0000\u0005\u0114\u0001\u0000\u0000"+ "\u0000\u0006\u0116\u0001\u0000\u0000\u0000\u0006\u0118\u0001\u0000\u0000"+ - "\u0000\u0006\u011c\u0001\u0000\u0000\u0000\u0006\u011e\u0001\u0000\u0000"+ + "\u0000\u0006\u011a\u0001\u0000\u0000\u0000\u0006\u011c\u0001\u0000\u0000"+ "\u0000\u0006\u0120\u0001\u0000\u0000\u0000\u0006\u0122\u0001\u0000\u0000"+ - "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0007\u0126\u0001\u0000\u0000"+ - "\u0000\u0007\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ + "\u0000\u0006\u0124\u0001\u0000\u0000\u0000\u0006\u0126\u0001\u0000\u0000"+ + "\u0000\u0006\u0128\u0001\u0000\u0000\u0000\u0007\u012a\u0001\u0000\u0000"+ "\u0000\u0007\u012c\u0001\u0000\u0000\u0000\u0007\u012e\u0001\u0000\u0000"+ "\u0000\u0007\u0130\u0001\u0000\u0000\u0000\u0007\u0132\u0001\u0000\u0000"+ "\u0000\u0007\u0134\u0001\u0000\u0000\u0000\u0007\u0136\u0001\u0000\u0000"+ "\u0000\u0007\u0138\u0001\u0000\u0000\u0000\u0007\u013a\u0001\u0000\u0000"+ - 
"\u0000\u0007\u013c\u0001\u0000\u0000\u0000\b\u013e\u0001\u0000\u0000\u0000"+ - "\b\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000\b\u0144"+ - "\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148\u0001"+ - "\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001\u0000"+ - "\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\t\u0150\u0001\u0000\u0000"+ - "\u0000\t\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000\u0000"+ - "\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000\n\u015a"+ - "\u0001\u0000\u0000\u0000\n\u015c\u0001\u0000\u0000\u0000\n\u015e\u0001"+ - "\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001\u0000"+ - "\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\u000b\u0166\u0001\u0000\u0000"+ - "\u0000\u000b\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ + "\u0000\u0007\u013c\u0001\u0000\u0000\u0000\u0007\u013e\u0001\u0000\u0000"+ + "\u0000\u0007\u0140\u0001\u0000\u0000\u0000\b\u0142\u0001\u0000\u0000\u0000"+ + "\b\u0144\u0001\u0000\u0000\u0000\b\u0146\u0001\u0000\u0000\u0000\b\u0148"+ + "\u0001\u0000\u0000\u0000\b\u014a\u0001\u0000\u0000\u0000\b\u014c\u0001"+ + "\u0000\u0000\u0000\b\u014e\u0001\u0000\u0000\u0000\b\u0150\u0001\u0000"+ + "\u0000\u0000\b\u0152\u0001\u0000\u0000\u0000\t\u0154\u0001\u0000\u0000"+ + "\u0000\t\u0156\u0001\u0000\u0000\u0000\t\u0158\u0001\u0000\u0000\u0000"+ + "\t\u015a\u0001\u0000\u0000\u0000\t\u015c\u0001\u0000\u0000\u0000\n\u015e"+ + "\u0001\u0000\u0000\u0000\n\u0160\u0001\u0000\u0000\u0000\n\u0162\u0001"+ + "\u0000\u0000\u0000\n\u0164\u0001\u0000\u0000\u0000\n\u0166\u0001\u0000"+ + "\u0000\u0000\n\u0168\u0001\u0000\u0000\u0000\u000b\u016a\u0001\u0000\u0000"+ "\u0000\u000b\u016c\u0001\u0000\u0000\u0000\u000b\u016e\u0001\u0000\u0000"+ "\u0000\u000b\u0170\u0001\u0000\u0000\u0000\u000b\u0172\u0001\u0000\u0000"+ "\u0000\u000b\u0174\u0001\u0000\u0000\u0000\u000b\u0176\u0001\u0000\u0000"+ - 
"\u0000\u000b\u0178\u0001\u0000\u0000\u0000\f\u017a\u0001\u0000\u0000\u0000"+ - "\f\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000\f\u0180"+ - "\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184\u0001"+ - "\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\r\u0188\u0001\u0000"+ - "\u0000\u0000\r\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000\u0000"+ - "\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000\u0000"+ - "\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000\r\u0196"+ - "\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a\u0001"+ - "\u0000\u0000\u0000\u000e\u019c\u0001\u0000\u0000\u0000\u000e\u019e\u0001"+ + "\u0000\u000b\u0178\u0001\u0000\u0000\u0000\u000b\u017a\u0001\u0000\u0000"+ + "\u0000\u000b\u017c\u0001\u0000\u0000\u0000\f\u017e\u0001\u0000\u0000\u0000"+ + "\f\u0180\u0001\u0000\u0000\u0000\f\u0182\u0001\u0000\u0000\u0000\f\u0184"+ + "\u0001\u0000\u0000\u0000\f\u0186\u0001\u0000\u0000\u0000\f\u0188\u0001"+ + "\u0000\u0000\u0000\f\u018a\u0001\u0000\u0000\u0000\r\u018c\u0001\u0000"+ + "\u0000\u0000\r\u018e\u0001\u0000\u0000\u0000\r\u0190\u0001\u0000\u0000"+ + "\u0000\r\u0192\u0001\u0000\u0000\u0000\r\u0194\u0001\u0000\u0000\u0000"+ + "\r\u0196\u0001\u0000\u0000\u0000\r\u0198\u0001\u0000\u0000\u0000\r\u019a"+ + "\u0001\u0000\u0000\u0000\r\u019c\u0001\u0000\u0000\u0000\r\u019e\u0001"+ "\u0000\u0000\u0000\u000e\u01a0\u0001\u0000\u0000\u0000\u000e\u01a2\u0001"+ "\u0000\u0000\u0000\u000e\u01a4\u0001\u0000\u0000\u0000\u000e\u01a6\u0001"+ - "\u0000\u0000\u0000\u000f\u01a8\u0001\u0000\u0000\u0000\u000f\u01aa\u0001"+ + "\u0000\u0000\u0000\u000e\u01a8\u0001\u0000\u0000\u0000\u000e\u01aa\u0001"+ "\u0000\u0000\u0000\u000f\u01ac\u0001\u0000\u0000\u0000\u000f\u01ae\u0001"+ "\u0000\u0000\u0000\u000f\u01b0\u0001\u0000\u0000\u0000\u000f\u01b2\u0001"+ "\u0000\u0000\u0000\u000f\u01b4\u0001\u0000\u0000\u0000\u000f\u01b6\u0001"+ - "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u0010\u01ba\u0001"+ 
- "\u0000\u0000\u0000\u0012\u01c4\u0001\u0000\u0000\u0000\u0014\u01cb\u0001"+ - "\u0000\u0000\u0000\u0016\u01d4\u0001\u0000\u0000\u0000\u0018\u01db\u0001"+ - "\u0000\u0000\u0000\u001a\u01e5\u0001\u0000\u0000\u0000\u001c\u01ec\u0001"+ - "\u0000\u0000\u0000\u001e\u01f3\u0001\u0000\u0000\u0000 \u01fa\u0001\u0000"+ - "\u0000\u0000\"\u0202\u0001\u0000\u0000\u0000$\u020e\u0001\u0000\u0000"+ - "\u0000&\u0217\u0001\u0000\u0000\u0000(\u021d\u0001\u0000\u0000\u0000*"+ - "\u0224\u0001\u0000\u0000\u0000,\u022b\u0001\u0000\u0000\u0000.\u0233\u0001"+ - "\u0000\u0000\u00000\u023b\u0001\u0000\u0000\u00002\u024a\u0001\u0000\u0000"+ - "\u00004\u0256\u0001\u0000\u0000\u00006\u0261\u0001\u0000\u0000\u00008"+ - "\u0269\u0001\u0000\u0000\u0000:\u0271\u0001\u0000\u0000\u0000<\u0279\u0001"+ - "\u0000\u0000\u0000>\u0282\u0001\u0000\u0000\u0000@\u028d\u0001\u0000\u0000"+ - "\u0000B\u0293\u0001\u0000\u0000\u0000D\u02a4\u0001\u0000\u0000\u0000F"+ - "\u02b4\u0001\u0000\u0000\u0000H\u02ba\u0001\u0000\u0000\u0000J\u02be\u0001"+ - "\u0000\u0000\u0000L\u02c0\u0001\u0000\u0000\u0000N\u02c2\u0001\u0000\u0000"+ - "\u0000P\u02c5\u0001\u0000\u0000\u0000R\u02c7\u0001\u0000\u0000\u0000T"+ - "\u02d0\u0001\u0000\u0000\u0000V\u02d2\u0001\u0000\u0000\u0000X\u02d7\u0001"+ - "\u0000\u0000\u0000Z\u02d9\u0001\u0000\u0000\u0000\\\u02de\u0001\u0000"+ - "\u0000\u0000^\u02fd\u0001\u0000\u0000\u0000`\u0300\u0001\u0000\u0000\u0000"+ - "b\u032e\u0001\u0000\u0000\u0000d\u0330\u0001\u0000\u0000\u0000f\u0333"+ - "\u0001\u0000\u0000\u0000h\u0337\u0001\u0000\u0000\u0000j\u033b\u0001\u0000"+ - "\u0000\u0000l\u033d\u0001\u0000\u0000\u0000n\u0340\u0001\u0000\u0000\u0000"+ - "p\u0342\u0001\u0000\u0000\u0000r\u0344\u0001\u0000\u0000\u0000t\u0349"+ - "\u0001\u0000\u0000\u0000v\u034b\u0001\u0000\u0000\u0000x\u0351\u0001\u0000"+ - "\u0000\u0000z\u0357\u0001\u0000\u0000\u0000|\u035a\u0001\u0000\u0000\u0000"+ - "~\u035d\u0001\u0000\u0000\u0000\u0080\u0362\u0001\u0000\u0000\u0000\u0082"+ - 
"\u0367\u0001\u0000\u0000\u0000\u0084\u0369\u0001\u0000\u0000\u0000\u0086"+ - "\u036d\u0001\u0000\u0000\u0000\u0088\u0372\u0001\u0000\u0000\u0000\u008a"+ - "\u0378\u0001\u0000\u0000\u0000\u008c\u037b\u0001\u0000\u0000\u0000\u008e"+ - "\u037d\u0001\u0000\u0000\u0000\u0090\u0383\u0001\u0000\u0000\u0000\u0092"+ - "\u0385\u0001\u0000\u0000\u0000\u0094\u038a\u0001\u0000\u0000\u0000\u0096"+ - "\u038d\u0001\u0000\u0000\u0000\u0098\u0390\u0001\u0000\u0000\u0000\u009a"+ - "\u0393\u0001\u0000\u0000\u0000\u009c\u0395\u0001\u0000\u0000\u0000\u009e"+ - "\u0398\u0001\u0000\u0000\u0000\u00a0\u039a\u0001\u0000\u0000\u0000\u00a2"+ - "\u039d\u0001\u0000\u0000\u0000\u00a4\u039f\u0001\u0000\u0000\u0000\u00a6"+ - "\u03a1\u0001\u0000\u0000\u0000\u00a8\u03a3\u0001\u0000\u0000\u0000\u00aa"+ - "\u03a5\u0001\u0000\u0000\u0000\u00ac\u03a7\u0001\u0000\u0000\u0000\u00ae"+ - "\u03bc\u0001\u0000\u0000\u0000\u00b0\u03be\u0001\u0000\u0000\u0000\u00b2"+ - "\u03c3\u0001\u0000\u0000\u0000\u00b4\u03d8\u0001\u0000\u0000\u0000\u00b6"+ - "\u03da\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ - "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03e8\u0001\u0000\u0000\u0000\u00be"+ - "\u03ec\u0001\u0000\u0000\u0000\u00c0\u03f0\u0001\u0000\u0000\u0000\u00c2"+ - "\u03f5\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ - "\u03fe\u0001\u0000\u0000\u0000\u00c8\u0402\u0001\u0000\u0000\u0000\u00ca"+ - "\u0406\u0001\u0000\u0000\u0000\u00cc\u040b\u0001\u0000\u0000\u0000\u00ce"+ - "\u040f\u0001\u0000\u0000\u0000\u00d0\u0413\u0001\u0000\u0000\u0000\u00d2"+ - "\u0417\u0001\u0000\u0000\u0000\u00d4\u041b\u0001\u0000\u0000\u0000\u00d6"+ - "\u041f\u0001\u0000\u0000\u0000\u00d8\u042b\u0001\u0000\u0000\u0000\u00da"+ - "\u042e\u0001\u0000\u0000\u0000\u00dc\u0432\u0001\u0000\u0000\u0000\u00de"+ - "\u0436\u0001\u0000\u0000\u0000\u00e0\u043a\u0001\u0000\u0000\u0000\u00e2"+ - "\u043e\u0001\u0000\u0000\u0000\u00e4\u0442\u0001\u0000\u0000\u0000\u00e6"+ - 
"\u0446\u0001\u0000\u0000\u0000\u00e8\u044b\u0001\u0000\u0000\u0000\u00ea"+ - "\u044f\u0001\u0000\u0000\u0000\u00ec\u0453\u0001\u0000\u0000\u0000\u00ee"+ - "\u0458\u0001\u0000\u0000\u0000\u00f0\u0461\u0001\u0000\u0000\u0000\u00f2"+ - "\u0476\u0001\u0000\u0000\u0000\u00f4\u047a\u0001\u0000\u0000\u0000\u00f6"+ - "\u047e\u0001\u0000\u0000\u0000\u00f8\u0482\u0001\u0000\u0000\u0000\u00fa"+ - "\u0486\u0001\u0000\u0000\u0000\u00fc\u048a\u0001\u0000\u0000\u0000\u00fe"+ - "\u048f\u0001\u0000\u0000\u0000\u0100\u0493\u0001\u0000\u0000\u0000\u0102"+ - "\u0497\u0001\u0000\u0000\u0000\u0104\u049b\u0001\u0000\u0000\u0000\u0106"+ - "\u04a0\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ - "\u04a8\u0001\u0000\u0000\u0000\u010c\u04ac\u0001\u0000\u0000\u0000\u010e"+ - "\u04b0\u0001\u0000\u0000\u0000\u0110\u04b4\u0001\u0000\u0000\u0000\u0112"+ - "\u04b8\u0001\u0000\u0000\u0000\u0114\u04bd\u0001\u0000\u0000\u0000\u0116"+ + "\u0000\u0000\u0000\u000f\u01b8\u0001\u0000\u0000\u0000\u000f\u01ba\u0001"+ + "\u0000\u0000\u0000\u000f\u01bc\u0001\u0000\u0000\u0000\u0010\u01be\u0001"+ + "\u0000\u0000\u0000\u0012\u01c8\u0001\u0000\u0000\u0000\u0014\u01cf\u0001"+ + "\u0000\u0000\u0000\u0016\u01d8\u0001\u0000\u0000\u0000\u0018\u01df\u0001"+ + "\u0000\u0000\u0000\u001a\u01e9\u0001\u0000\u0000\u0000\u001c\u01f0\u0001"+ + "\u0000\u0000\u0000\u001e\u01f7\u0001\u0000\u0000\u0000 \u01fe\u0001\u0000"+ + "\u0000\u0000\"\u0206\u0001\u0000\u0000\u0000$\u0212\u0001\u0000\u0000"+ + "\u0000&\u021b\u0001\u0000\u0000\u0000(\u0221\u0001\u0000\u0000\u0000*"+ + "\u0228\u0001\u0000\u0000\u0000,\u022f\u0001\u0000\u0000\u0000.\u0237\u0001"+ + "\u0000\u0000\u00000\u023f\u0001\u0000\u0000\u00002\u024e\u0001\u0000\u0000"+ + "\u00004\u025a\u0001\u0000\u0000\u00006\u0265\u0001\u0000\u0000\u00008"+ + "\u026d\u0001\u0000\u0000\u0000:\u0275\u0001\u0000\u0000\u0000<\u027d\u0001"+ + "\u0000\u0000\u0000>\u0286\u0001\u0000\u0000\u0000@\u0291\u0001\u0000\u0000"+ + 
"\u0000B\u0297\u0001\u0000\u0000\u0000D\u02a8\u0001\u0000\u0000\u0000F"+ + "\u02b8\u0001\u0000\u0000\u0000H\u02be\u0001\u0000\u0000\u0000J\u02c2\u0001"+ + "\u0000\u0000\u0000L\u02c4\u0001\u0000\u0000\u0000N\u02c6\u0001\u0000\u0000"+ + "\u0000P\u02c9\u0001\u0000\u0000\u0000R\u02cb\u0001\u0000\u0000\u0000T"+ + "\u02d4\u0001\u0000\u0000\u0000V\u02d6\u0001\u0000\u0000\u0000X\u02db\u0001"+ + "\u0000\u0000\u0000Z\u02dd\u0001\u0000\u0000\u0000\\\u02e2\u0001\u0000"+ + "\u0000\u0000^\u0301\u0001\u0000\u0000\u0000`\u0304\u0001\u0000\u0000\u0000"+ + "b\u0332\u0001\u0000\u0000\u0000d\u0334\u0001\u0000\u0000\u0000f\u0337"+ + "\u0001\u0000\u0000\u0000h\u033b\u0001\u0000\u0000\u0000j\u033f\u0001\u0000"+ + "\u0000\u0000l\u0341\u0001\u0000\u0000\u0000n\u0344\u0001\u0000\u0000\u0000"+ + "p\u0346\u0001\u0000\u0000\u0000r\u0348\u0001\u0000\u0000\u0000t\u034d"+ + "\u0001\u0000\u0000\u0000v\u034f\u0001\u0000\u0000\u0000x\u0355\u0001\u0000"+ + "\u0000\u0000z\u035b\u0001\u0000\u0000\u0000|\u035e\u0001\u0000\u0000\u0000"+ + "~\u0361\u0001\u0000\u0000\u0000\u0080\u0366\u0001\u0000\u0000\u0000\u0082"+ + "\u036b\u0001\u0000\u0000\u0000\u0084\u036d\u0001\u0000\u0000\u0000\u0086"+ + "\u0371\u0001\u0000\u0000\u0000\u0088\u0376\u0001\u0000\u0000\u0000\u008a"+ + "\u037c\u0001\u0000\u0000\u0000\u008c\u037f\u0001\u0000\u0000\u0000\u008e"+ + "\u0381\u0001\u0000\u0000\u0000\u0090\u0387\u0001\u0000\u0000\u0000\u0092"+ + "\u0389\u0001\u0000\u0000\u0000\u0094\u038e\u0001\u0000\u0000\u0000\u0096"+ + "\u0391\u0001\u0000\u0000\u0000\u0098\u0394\u0001\u0000\u0000\u0000\u009a"+ + "\u0397\u0001\u0000\u0000\u0000\u009c\u0399\u0001\u0000\u0000\u0000\u009e"+ + "\u039c\u0001\u0000\u0000\u0000\u00a0\u039e\u0001\u0000\u0000\u0000\u00a2"+ + "\u03a1\u0001\u0000\u0000\u0000\u00a4\u03a3\u0001\u0000\u0000\u0000\u00a6"+ + "\u03a5\u0001\u0000\u0000\u0000\u00a8\u03a7\u0001\u0000\u0000\u0000\u00aa"+ + "\u03a9\u0001\u0000\u0000\u0000\u00ac\u03ab\u0001\u0000\u0000\u0000\u00ae"+ + 
"\u03ae\u0001\u0000\u0000\u0000\u00b0\u03b1\u0001\u0000\u0000\u0000\u00b2"+ + "\u03c6\u0001\u0000\u0000\u0000\u00b4\u03c8\u0001\u0000\u0000\u0000\u00b6"+ + "\u03cd\u0001\u0000\u0000\u0000\u00b8\u03e2\u0001\u0000\u0000\u0000\u00ba"+ + "\u03e4\u0001\u0000\u0000\u0000\u00bc\u03ec\u0001\u0000\u0000\u0000\u00be"+ + "\u03ee\u0001\u0000\u0000\u0000\u00c0\u03f2\u0001\u0000\u0000\u0000\u00c2"+ + "\u03f6\u0001\u0000\u0000\u0000\u00c4\u03fa\u0001\u0000\u0000\u0000\u00c6"+ + "\u03ff\u0001\u0000\u0000\u0000\u00c8\u0404\u0001\u0000\u0000\u0000\u00ca"+ + "\u0408\u0001\u0000\u0000\u0000\u00cc\u040c\u0001\u0000\u0000\u0000\u00ce"+ + "\u0410\u0001\u0000\u0000\u0000\u00d0\u0415\u0001\u0000\u0000\u0000\u00d2"+ + "\u0419\u0001\u0000\u0000\u0000\u00d4\u041d\u0001\u0000\u0000\u0000\u00d6"+ + "\u0421\u0001\u0000\u0000\u0000\u00d8\u0425\u0001\u0000\u0000\u0000\u00da"+ + "\u0429\u0001\u0000\u0000\u0000\u00dc\u0435\u0001\u0000\u0000\u0000\u00de"+ + "\u0438\u0001\u0000\u0000\u0000\u00e0\u043c\u0001\u0000\u0000\u0000\u00e2"+ + "\u0440\u0001\u0000\u0000\u0000\u00e4\u0444\u0001\u0000\u0000\u0000\u00e6"+ + "\u0448\u0001\u0000\u0000\u0000\u00e8\u044c\u0001\u0000\u0000\u0000\u00ea"+ + "\u0450\u0001\u0000\u0000\u0000\u00ec\u0455\u0001\u0000\u0000\u0000\u00ee"+ + "\u0459\u0001\u0000\u0000\u0000\u00f0\u045d\u0001\u0000\u0000\u0000\u00f2"+ + "\u0462\u0001\u0000\u0000\u0000\u00f4\u046b\u0001\u0000\u0000\u0000\u00f6"+ + "\u0480\u0001\u0000\u0000\u0000\u00f8\u0484\u0001\u0000\u0000\u0000\u00fa"+ + "\u0488\u0001\u0000\u0000\u0000\u00fc\u048c\u0001\u0000\u0000\u0000\u00fe"+ + "\u0490\u0001\u0000\u0000\u0000\u0100\u0494\u0001\u0000\u0000\u0000\u0102"+ + "\u0499\u0001\u0000\u0000\u0000\u0104\u049d\u0001\u0000\u0000\u0000\u0106"+ + "\u04a1\u0001\u0000\u0000\u0000\u0108\u04a5\u0001\u0000\u0000\u0000\u010a"+ + "\u04aa\u0001\u0000\u0000\u0000\u010c\u04af\u0001\u0000\u0000\u0000\u010e"+ + "\u04b2\u0001\u0000\u0000\u0000\u0110\u04b6\u0001\u0000\u0000\u0000\u0112"+ + 
"\u04ba\u0001\u0000\u0000\u0000\u0114\u04be\u0001\u0000\u0000\u0000\u0116"+ "\u04c2\u0001\u0000\u0000\u0000\u0118\u04c7\u0001\u0000\u0000\u0000\u011a"+ - "\u04ce\u0001\u0000\u0000\u0000\u011c\u04d7\u0001\u0000\u0000\u0000\u011e"+ - "\u04de\u0001\u0000\u0000\u0000\u0120\u04e2\u0001\u0000\u0000\u0000\u0122"+ - "\u04e6\u0001\u0000\u0000\u0000\u0124\u04ea\u0001\u0000\u0000\u0000\u0126"+ - "\u04ee\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ - "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fc\u0001\u0000\u0000\u0000\u012e"+ - "\u0500\u0001\u0000\u0000\u0000\u0130\u0504\u0001\u0000\u0000\u0000\u0132"+ - "\u0508\u0001\u0000\u0000\u0000\u0134\u050c\u0001\u0000\u0000\u0000\u0136"+ - "\u0511\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ - "\u051a\u0001\u0000\u0000\u0000\u013c\u051e\u0001\u0000\u0000\u0000\u013e"+ - "\u0522\u0001\u0000\u0000\u0000\u0140\u0527\u0001\u0000\u0000\u0000\u0142"+ - "\u052b\u0001\u0000\u0000\u0000\u0144\u0530\u0001\u0000\u0000\u0000\u0146"+ - "\u0535\u0001\u0000\u0000\u0000\u0148\u0539\u0001\u0000\u0000\u0000\u014a"+ - "\u053d\u0001\u0000\u0000\u0000\u014c\u0541\u0001\u0000\u0000\u0000\u014e"+ - "\u0545\u0001\u0000\u0000\u0000\u0150\u0549\u0001\u0000\u0000\u0000\u0152"+ - "\u054e\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ - "\u0557\u0001\u0000\u0000\u0000\u0158\u055b\u0001\u0000\u0000\u0000\u015a"+ - "\u055f\u0001\u0000\u0000\u0000\u015c\u0564\u0001\u0000\u0000\u0000\u015e"+ - "\u056d\u0001\u0000\u0000\u0000\u0160\u0571\u0001\u0000\u0000\u0000\u0162"+ - "\u0575\u0001\u0000\u0000\u0000\u0164\u0579\u0001\u0000\u0000\u0000\u0166"+ - "\u057d\u0001\u0000\u0000\u0000\u0168\u0582\u0001\u0000\u0000\u0000\u016a"+ - "\u0586\u0001\u0000\u0000\u0000\u016c\u058a\u0001\u0000\u0000\u0000\u016e"+ - "\u058e\u0001\u0000\u0000\u0000\u0170\u0593\u0001\u0000\u0000\u0000\u0172"+ - "\u0597\u0001\u0000\u0000\u0000\u0174\u059b\u0001\u0000\u0000\u0000\u0176"+ - 
"\u059f\u0001\u0000\u0000\u0000\u0178\u05a3\u0001\u0000\u0000\u0000\u017a"+ - "\u05a7\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ - "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b5\u0001\u0000\u0000\u0000\u0182"+ - "\u05b9\u0001\u0000\u0000\u0000\u0184\u05bd\u0001\u0000\u0000\u0000\u0186"+ - "\u05c1\u0001\u0000\u0000\u0000\u0188\u05c5\u0001\u0000\u0000\u0000\u018a"+ - "\u05ca\u0001\u0000\u0000\u0000\u018c\u05ce\u0001\u0000\u0000\u0000\u018e"+ - "\u05d2\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ - "\u05e1\u0001\u0000\u0000\u0000\u0194\u05e5\u0001\u0000\u0000\u0000\u0196"+ - "\u05e9\u0001\u0000\u0000\u0000\u0198\u05ed\u0001\u0000\u0000\u0000\u019a"+ - "\u05f1\u0001\u0000\u0000\u0000\u019c\u05f5\u0001\u0000\u0000\u0000\u019e"+ - "\u05fa\u0001\u0000\u0000\u0000\u01a0\u0600\u0001\u0000\u0000\u0000\u01a2"+ - "\u0606\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ - "\u060e\u0001\u0000\u0000\u0000\u01a8\u0612\u0001\u0000\u0000\u0000\u01aa"+ - "\u0618\u0001\u0000\u0000\u0000\u01ac\u061e\u0001\u0000\u0000\u0000\u01ae"+ - "\u0622\u0001\u0000\u0000\u0000\u01b0\u0626\u0001\u0000\u0000\u0000\u01b2"+ - "\u062a\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ - "\u0636\u0001\u0000\u0000\u0000\u01b8\u063c\u0001\u0000\u0000\u0000\u01ba"+ - "\u01bb\u0007\u0000\u0000\u0000\u01bb\u01bc\u0007\u0001\u0000\u0000\u01bc"+ - "\u01bd\u0007\u0002\u0000\u0000\u01bd\u01be\u0007\u0002\u0000\u0000\u01be"+ - "\u01bf\u0007\u0003\u0000\u0000\u01bf\u01c0\u0007\u0004\u0000\u0000\u01c0"+ - "\u01c1\u0007\u0005\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c2"+ - "\u01c3\u0006\u0000\u0000\u0000\u01c3\u0011\u0001\u0000\u0000\u0000\u01c4"+ - "\u01c5\u0007\u0000\u0000\u0000\u01c5\u01c6\u0007\u0006\u0000\u0000\u01c6"+ - "\u01c7\u0007\u0007\u0000\u0000\u01c7\u01c8\u0007\b\u0000\u0000\u01c8\u01c9"+ - "\u0001\u0000\u0000\u0000\u01c9\u01ca\u0006\u0001\u0001\u0000\u01ca\u0013"+ - 
"\u0001\u0000\u0000\u0000\u01cb\u01cc\u0007\u0003\u0000\u0000\u01cc\u01cd"+ - "\u0007\t\u0000\u0000\u01cd\u01ce\u0007\u0006\u0000\u0000\u01ce\u01cf\u0007"+ - "\u0001\u0000\u0000\u01cf\u01d0\u0007\u0004\u0000\u0000\u01d0\u01d1\u0007"+ - "\n\u0000\u0000\u01d1\u01d2\u0001\u0000\u0000\u0000\u01d2\u01d3\u0006\u0002"+ - "\u0002\u0000\u01d3\u0015\u0001\u0000\u0000\u0000\u01d4\u01d5\u0007\u0003"+ - "\u0000\u0000\u01d5\u01d6\u0007\u000b\u0000\u0000\u01d6\u01d7\u0007\f\u0000"+ - "\u0000\u01d7\u01d8\u0007\r\u0000\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000"+ - "\u01d9\u01da\u0006\u0003\u0000\u0000\u01da\u0017\u0001\u0000\u0000\u0000"+ - "\u01db\u01dc\u0007\u0003\u0000\u0000\u01dc\u01dd\u0007\u000e\u0000\u0000"+ - "\u01dd\u01de\u0007\b\u0000\u0000\u01de\u01df\u0007\r\u0000\u0000\u01df"+ - "\u01e0\u0007\f\u0000\u0000\u01e0\u01e1\u0007\u0001\u0000\u0000\u01e1\u01e2"+ - "\u0007\t\u0000\u0000\u01e2\u01e3\u0001\u0000\u0000\u0000\u01e3\u01e4\u0006"+ - "\u0004\u0003\u0000\u01e4\u0019\u0001\u0000\u0000\u0000\u01e5\u01e6\u0007"+ - "\u000f\u0000\u0000\u01e6\u01e7\u0007\u0006\u0000\u0000\u01e7\u01e8\u0007"+ - "\u0007\u0000\u0000\u01e8\u01e9\u0007\u0010\u0000\u0000\u01e9\u01ea\u0001"+ - "\u0000\u0000\u0000\u01ea\u01eb\u0006\u0005\u0004\u0000\u01eb\u001b\u0001"+ - "\u0000\u0000\u0000\u01ec\u01ed\u0007\u0011\u0000\u0000\u01ed\u01ee\u0007"+ - "\u0006\u0000\u0000\u01ee\u01ef\u0007\u0007\u0000\u0000\u01ef\u01f0\u0007"+ - "\u0012\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000\u01f1\u01f2\u0006"+ - "\u0006\u0000\u0000\u01f2\u001d\u0001\u0000\u0000\u0000\u01f3\u01f4\u0007"+ - "\u0012\u0000\u0000\u01f4\u01f5\u0007\u0003\u0000\u0000\u01f5\u01f6\u0007"+ - "\u0003\u0000\u0000\u01f6\u01f7\u0007\b\u0000\u0000\u01f7\u01f8\u0001\u0000"+ - "\u0000\u0000\u01f8\u01f9\u0006\u0007\u0001\u0000\u01f9\u001f\u0001\u0000"+ - "\u0000\u0000\u01fa\u01fb\u0007\r\u0000\u0000\u01fb\u01fc\u0007\u0001\u0000"+ - "\u0000\u01fc\u01fd\u0007\u0010\u0000\u0000\u01fd\u01fe\u0007\u0001\u0000"+ - 
"\u0000\u01fe\u01ff\u0007\u0005\u0000\u0000\u01ff\u0200\u0001\u0000\u0000"+ - "\u0000\u0200\u0201\u0006\b\u0000\u0000\u0201!\u0001\u0000\u0000\u0000"+ - "\u0202\u0203\u0007\u0010\u0000\u0000\u0203\u0204\u0007\u000b\u0000\u0000"+ - "\u0204\u0205\u0005_\u0000\u0000\u0205\u0206\u0007\u0003\u0000\u0000\u0206"+ - "\u0207\u0007\u000e\u0000\u0000\u0207\u0208\u0007\b\u0000\u0000\u0208\u0209"+ - "\u0007\f\u0000\u0000\u0209\u020a\u0007\t\u0000\u0000\u020a\u020b\u0007"+ - "\u0000\u0000\u0000\u020b\u020c\u0001\u0000\u0000\u0000\u020c\u020d\u0006"+ - "\t\u0005\u0000\u020d#\u0001\u0000\u0000\u0000\u020e\u020f\u0007\u0006"+ - "\u0000\u0000\u020f\u0210\u0007\u0003\u0000\u0000\u0210\u0211\u0007\t\u0000"+ - "\u0000\u0211\u0212\u0007\f\u0000\u0000\u0212\u0213\u0007\u0010\u0000\u0000"+ - "\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0001\u0000\u0000\u0000"+ - "\u0215\u0216\u0006\n\u0006\u0000\u0216%\u0001\u0000\u0000\u0000\u0217"+ - "\u0218\u0007\u0006\u0000\u0000\u0218\u0219\u0007\u0007\u0000\u0000\u0219"+ - "\u021a\u0007\u0013\u0000\u0000\u021a\u021b\u0001\u0000\u0000\u0000\u021b"+ - "\u021c\u0006\u000b\u0000\u0000\u021c\'\u0001\u0000\u0000\u0000\u021d\u021e"+ - "\u0007\u0002\u0000\u0000\u021e\u021f\u0007\n\u0000\u0000\u021f\u0220\u0007"+ - "\u0007\u0000\u0000\u0220\u0221\u0007\u0013\u0000\u0000\u0221\u0222\u0001"+ - "\u0000\u0000\u0000\u0222\u0223\u0006\f\u0007\u0000\u0223)\u0001\u0000"+ - "\u0000\u0000\u0224\u0225\u0007\u0002\u0000\u0000\u0225\u0226\u0007\u0007"+ - "\u0000\u0000\u0226\u0227\u0007\u0006\u0000\u0000\u0227\u0228\u0007\u0005"+ - "\u0000\u0000\u0228\u0229\u0001\u0000\u0000\u0000\u0229\u022a\u0006\r\u0000"+ - "\u0000\u022a+\u0001\u0000\u0000\u0000\u022b\u022c\u0007\u0002\u0000\u0000"+ - "\u022c\u022d\u0007\u0005\u0000\u0000\u022d\u022e\u0007\f\u0000\u0000\u022e"+ - "\u022f\u0007\u0005\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000\u0230"+ - "\u0231\u0001\u0000\u0000\u0000\u0231\u0232\u0006\u000e\u0000\u0000\u0232"+ - 
"-\u0001\u0000\u0000\u0000\u0233\u0234\u0007\u0013\u0000\u0000\u0234\u0235"+ - "\u0007\n\u0000\u0000\u0235\u0236\u0007\u0003\u0000\u0000\u0236\u0237\u0007"+ - "\u0006\u0000\u0000\u0237\u0238\u0007\u0003\u0000\u0000\u0238\u0239\u0001"+ - "\u0000\u0000\u0000\u0239\u023a\u0006\u000f\u0000\u0000\u023a/\u0001\u0000"+ - "\u0000\u0000\u023b\u023c\u0004\u0010\u0000\u0000\u023c\u023d\u0007\u0001"+ - "\u0000\u0000\u023d\u023e\u0007\t\u0000\u0000\u023e\u023f\u0007\r\u0000"+ - "\u0000\u023f\u0240\u0007\u0001\u0000\u0000\u0240\u0241\u0007\t\u0000\u0000"+ - "\u0241\u0242\u0007\u0003\u0000\u0000\u0242\u0243\u0007\u0002\u0000\u0000"+ - "\u0243\u0244\u0007\u0005\u0000\u0000\u0244\u0245\u0007\f\u0000\u0000\u0245"+ - "\u0246\u0007\u0005\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000\u0247"+ - "\u0248\u0001\u0000\u0000\u0000\u0248\u0249\u0006\u0010\u0000\u0000\u0249"+ - "1\u0001\u0000\u0000\u0000\u024a\u024b\u0004\u0011\u0001\u0000\u024b\u024c"+ - "\u0007\r\u0000\u0000\u024c\u024d\u0007\u0007\u0000\u0000\u024d\u024e\u0007"+ - "\u0007\u0000\u0000\u024e\u024f\u0007\u0012\u0000\u0000\u024f\u0250\u0007"+ - "\u0014\u0000\u0000\u0250\u0251\u0007\b\u0000\u0000\u0251\u0252\u0005_"+ - "\u0000\u0000\u0252\u0253\u0005\u8001\uf414\u0000\u0000\u0253\u0254\u0001"+ - "\u0000\u0000\u0000\u0254\u0255\u0006\u0011\b\u0000\u02553\u0001\u0000"+ - "\u0000\u0000\u0256\u0257\u0004\u0012\u0002\u0000\u0257\u0258\u0007\u0010"+ - "\u0000\u0000\u0258\u0259\u0007\u0003\u0000\u0000\u0259\u025a\u0007\u0005"+ - "\u0000\u0000\u025a\u025b\u0007\u0006\u0000\u0000\u025b\u025c\u0007\u0001"+ - "\u0000\u0000\u025c\u025d\u0007\u0004\u0000\u0000\u025d\u025e\u0007\u0002"+ - "\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f\u0260\u0006\u0012"+ - "\t\u0000\u02605\u0001\u0000\u0000\u0000\u0261\u0262\u0004\u0013\u0003"+ - "\u0000\u0262\u0263\u0007\u0015\u0000\u0000\u0263\u0264\u0007\u0007\u0000"+ - "\u0000\u0264\u0265\u0007\u0001\u0000\u0000\u0265\u0266\u0007\t\u0000\u0000"+ - 
"\u0266\u0267\u0001\u0000\u0000\u0000\u0267\u0268\u0006\u0013\n\u0000\u0268"+ - "7\u0001\u0000\u0000\u0000\u0269\u026a\u0004\u0014\u0004\u0000\u026a\u026b"+ - "\u0007\u000f\u0000\u0000\u026b\u026c\u0007\u0014\u0000\u0000\u026c\u026d"+ - "\u0007\r\u0000\u0000\u026d\u026e\u0007\r\u0000\u0000\u026e\u026f\u0001"+ - "\u0000\u0000\u0000\u026f\u0270\u0006\u0014\n\u0000\u02709\u0001\u0000"+ - "\u0000\u0000\u0271\u0272\u0004\u0015\u0005\u0000\u0272\u0273\u0007\r\u0000"+ - "\u0000\u0273\u0274\u0007\u0003\u0000\u0000\u0274\u0275\u0007\u000f\u0000"+ - "\u0000\u0275\u0276\u0007\u0005\u0000\u0000\u0276\u0277\u0001\u0000\u0000"+ - "\u0000\u0277\u0278\u0006\u0015\n\u0000\u0278;\u0001\u0000\u0000\u0000"+ - "\u0279\u027a\u0004\u0016\u0006\u0000\u027a\u027b\u0007\u0006\u0000\u0000"+ - "\u027b\u027c\u0007\u0001\u0000\u0000\u027c\u027d\u0007\u0011\u0000\u0000"+ - "\u027d\u027e\u0007\n\u0000\u0000\u027e\u027f\u0007\u0005\u0000\u0000\u027f"+ - "\u0280\u0001\u0000\u0000\u0000\u0280\u0281\u0006\u0016\n\u0000\u0281="+ - "\u0001\u0000\u0000\u0000\u0282\u0283\u0004\u0017\u0007\u0000\u0283\u0284"+ - "\u0007\r\u0000\u0000\u0284\u0285\u0007\u0007\u0000\u0000\u0285\u0286\u0007"+ - "\u0007\u0000\u0000\u0286\u0287\u0007\u0012\u0000\u0000\u0287\u0288\u0007"+ - "\u0014\u0000\u0000\u0288\u0289\u0007\b\u0000\u0000\u0289\u028a\u0001\u0000"+ - "\u0000\u0000\u028a\u028b\u0006\u0017\n\u0000\u028b?\u0001\u0000\u0000"+ - "\u0000\u028c\u028e\b\u0016\u0000\u0000\u028d\u028c\u0001\u0000\u0000\u0000"+ - "\u028e\u028f\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000\u0000\u0000"+ - "\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u0291\u0001\u0000\u0000\u0000"+ - "\u0291\u0292\u0006\u0018\u0000\u0000\u0292A\u0001\u0000\u0000\u0000\u0293"+ - "\u0294\u0005/\u0000\u0000\u0294\u0295\u0005/\u0000\u0000\u0295\u0299\u0001"+ - "\u0000\u0000\u0000\u0296\u0298\b\u0017\u0000\u0000\u0297\u0296\u0001\u0000"+ - "\u0000\u0000\u0298\u029b\u0001\u0000\u0000\u0000\u0299\u0297\u0001\u0000"+ - 
"\u0000\u0000\u0299\u029a\u0001\u0000\u0000\u0000\u029a\u029d\u0001\u0000"+ - "\u0000\u0000\u029b\u0299\u0001\u0000\u0000\u0000\u029c\u029e\u0005\r\u0000"+ - "\u0000\u029d\u029c\u0001\u0000\u0000\u0000\u029d\u029e\u0001\u0000\u0000"+ - "\u0000\u029e\u02a0\u0001\u0000\u0000\u0000\u029f\u02a1\u0005\n\u0000\u0000"+ - "\u02a0\u029f\u0001\u0000\u0000\u0000\u02a0\u02a1\u0001\u0000\u0000\u0000"+ - "\u02a1\u02a2\u0001\u0000\u0000\u0000\u02a2\u02a3\u0006\u0019\u000b\u0000"+ - "\u02a3C\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005/\u0000\u0000\u02a5\u02a6"+ - "\u0005*\u0000\u0000\u02a6\u02ab\u0001\u0000\u0000\u0000\u02a7\u02aa\u0003"+ - "D\u001a\u0000\u02a8\u02aa\t\u0000\u0000\u0000\u02a9\u02a7\u0001\u0000"+ - "\u0000\u0000\u02a9\u02a8\u0001\u0000\u0000\u0000\u02aa\u02ad\u0001\u0000"+ - "\u0000\u0000\u02ab\u02ac\u0001\u0000\u0000\u0000\u02ab\u02a9\u0001\u0000"+ - "\u0000\u0000\u02ac\u02ae\u0001\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ - "\u0000\u0000\u02ae\u02af\u0005*\u0000\u0000\u02af\u02b0\u0005/\u0000\u0000"+ - "\u02b0\u02b1\u0001\u0000\u0000\u0000\u02b1\u02b2\u0006\u001a\u000b\u0000"+ - "\u02b2E\u0001\u0000\u0000\u0000\u02b3\u02b5\u0007\u0018\u0000\u0000\u02b4"+ - "\u02b3\u0001\u0000\u0000\u0000\u02b5\u02b6\u0001\u0000\u0000\u0000\u02b6"+ - "\u02b4\u0001\u0000\u0000\u0000\u02b6\u02b7\u0001\u0000\u0000\u0000\u02b7"+ - "\u02b8\u0001\u0000\u0000\u0000\u02b8\u02b9\u0006\u001b\u000b\u0000\u02b9"+ - "G\u0001\u0000\u0000\u0000\u02ba\u02bb\u0005|\u0000\u0000\u02bb\u02bc\u0001"+ - "\u0000\u0000\u0000\u02bc\u02bd\u0006\u001c\f\u0000\u02bdI\u0001\u0000"+ - "\u0000\u0000\u02be\u02bf\u0007\u0019\u0000\u0000\u02bfK\u0001\u0000\u0000"+ - "\u0000\u02c0\u02c1\u0007\u001a\u0000\u0000\u02c1M\u0001\u0000\u0000\u0000"+ - "\u02c2\u02c3\u0005\\\u0000\u0000\u02c3\u02c4\u0007\u001b\u0000\u0000\u02c4"+ - "O\u0001\u0000\u0000\u0000\u02c5\u02c6\b\u001c\u0000\u0000\u02c6Q\u0001"+ - "\u0000\u0000\u0000\u02c7\u02c9\u0007\u0003\u0000\u0000\u02c8\u02ca\u0007"+ - 
"\u001d\u0000\u0000\u02c9\u02c8\u0001\u0000\u0000\u0000\u02c9\u02ca\u0001"+ - "\u0000\u0000\u0000\u02ca\u02cc\u0001\u0000\u0000\u0000\u02cb\u02cd\u0003"+ - "J\u001d\u0000\u02cc\u02cb\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001\u0000"+ - "\u0000\u0000\u02ce\u02cc\u0001\u0000\u0000\u0000\u02ce\u02cf\u0001\u0000"+ - "\u0000\u0000\u02cfS\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005@\u0000\u0000"+ - "\u02d1U\u0001\u0000\u0000\u0000\u02d2\u02d3\u0005`\u0000\u0000\u02d3W"+ - "\u0001\u0000\u0000\u0000\u02d4\u02d8\b\u001e\u0000\u0000\u02d5\u02d6\u0005"+ - "`\u0000\u0000\u02d6\u02d8\u0005`\u0000\u0000\u02d7\u02d4\u0001\u0000\u0000"+ - "\u0000\u02d7\u02d5\u0001\u0000\u0000\u0000\u02d8Y\u0001\u0000\u0000\u0000"+ - "\u02d9\u02da\u0005_\u0000\u0000\u02da[\u0001\u0000\u0000\u0000\u02db\u02df"+ - "\u0003L\u001e\u0000\u02dc\u02df\u0003J\u001d\u0000\u02dd\u02df\u0003Z"+ - "%\u0000\u02de\u02db\u0001\u0000\u0000\u0000\u02de\u02dc\u0001\u0000\u0000"+ - "\u0000\u02de\u02dd\u0001\u0000\u0000\u0000\u02df]\u0001\u0000\u0000\u0000"+ - "\u02e0\u02e5\u0005\"\u0000\u0000\u02e1\u02e4\u0003N\u001f\u0000\u02e2"+ - "\u02e4\u0003P \u0000\u02e3\u02e1\u0001\u0000\u0000\u0000\u02e3\u02e2\u0001"+ - "\u0000\u0000\u0000\u02e4\u02e7\u0001\u0000\u0000\u0000\u02e5\u02e3\u0001"+ - "\u0000\u0000\u0000\u02e5\u02e6\u0001\u0000\u0000\u0000\u02e6\u02e8\u0001"+ - "\u0000\u0000\u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e8\u02fe\u0005"+ - "\"\u0000\u0000\u02e9\u02ea\u0005\"\u0000\u0000\u02ea\u02eb\u0005\"\u0000"+ - "\u0000\u02eb\u02ec\u0005\"\u0000\u0000\u02ec\u02f0\u0001\u0000\u0000\u0000"+ - "\u02ed\u02ef\b\u0017\u0000\u0000\u02ee\u02ed\u0001\u0000\u0000\u0000\u02ef"+ - "\u02f2\u0001\u0000\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f0"+ - "\u02ee\u0001\u0000\u0000\u0000\u02f1\u02f3\u0001\u0000\u0000\u0000\u02f2"+ - "\u02f0\u0001\u0000\u0000\u0000\u02f3\u02f4\u0005\"\u0000\u0000\u02f4\u02f5"+ - "\u0005\"\u0000\u0000\u02f5\u02f6\u0005\"\u0000\u0000\u02f6\u02f8\u0001"+ - 
"\u0000\u0000\u0000\u02f7\u02f9\u0005\"\u0000\u0000\u02f8\u02f7\u0001\u0000"+ - "\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u02fb\u0001\u0000"+ - "\u0000\u0000\u02fa\u02fc\u0005\"\u0000\u0000\u02fb\u02fa\u0001\u0000\u0000"+ - "\u0000\u02fb\u02fc\u0001\u0000\u0000\u0000\u02fc\u02fe\u0001\u0000\u0000"+ - "\u0000\u02fd\u02e0\u0001\u0000\u0000\u0000\u02fd\u02e9\u0001\u0000\u0000"+ - "\u0000\u02fe_\u0001\u0000\u0000\u0000\u02ff\u0301\u0003J\u001d\u0000\u0300"+ - "\u02ff\u0001\u0000\u0000\u0000\u0301\u0302\u0001\u0000\u0000\u0000\u0302"+ - "\u0300\u0001\u0000\u0000\u0000\u0302\u0303\u0001\u0000\u0000\u0000\u0303"+ - "a\u0001\u0000\u0000\u0000\u0304\u0306\u0003J\u001d\u0000\u0305\u0304\u0001"+ - "\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307\u0305\u0001"+ - "\u0000\u0000\u0000\u0307\u0308\u0001\u0000\u0000\u0000\u0308\u0309\u0001"+ - "\u0000\u0000\u0000\u0309\u030d\u0003t2\u0000\u030a\u030c\u0003J\u001d"+ - "\u0000\u030b\u030a\u0001\u0000\u0000\u0000\u030c\u030f\u0001\u0000\u0000"+ - "\u0000\u030d\u030b\u0001\u0000\u0000\u0000\u030d\u030e\u0001\u0000\u0000"+ - "\u0000\u030e\u032f\u0001\u0000\u0000\u0000\u030f\u030d\u0001\u0000\u0000"+ - "\u0000\u0310\u0312\u0003t2\u0000\u0311\u0313\u0003J\u001d\u0000\u0312"+ - "\u0311\u0001\u0000\u0000\u0000\u0313\u0314\u0001\u0000\u0000\u0000\u0314"+ - "\u0312\u0001\u0000\u0000\u0000\u0314\u0315\u0001\u0000\u0000\u0000\u0315"+ - "\u032f\u0001\u0000\u0000\u0000\u0316\u0318\u0003J\u001d\u0000\u0317\u0316"+ - "\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319\u0317"+ - "\u0001\u0000\u0000\u0000\u0319\u031a\u0001\u0000\u0000\u0000\u031a\u0322"+ - "\u0001\u0000\u0000\u0000\u031b\u031f\u0003t2\u0000\u031c\u031e\u0003J"+ - "\u001d\u0000\u031d\u031c\u0001\u0000\u0000\u0000\u031e\u0321\u0001\u0000"+ - "\u0000\u0000\u031f\u031d\u0001\u0000\u0000\u0000\u031f\u0320\u0001\u0000"+ - "\u0000\u0000\u0320\u0323\u0001\u0000\u0000\u0000\u0321\u031f\u0001\u0000"+ - 
"\u0000\u0000\u0322\u031b\u0001\u0000\u0000\u0000\u0322\u0323\u0001\u0000"+ - "\u0000\u0000\u0323\u0324\u0001\u0000\u0000\u0000\u0324\u0325\u0003R!\u0000"+ - "\u0325\u032f\u0001\u0000\u0000\u0000\u0326\u0328\u0003t2\u0000\u0327\u0329"+ - "\u0003J\u001d\u0000\u0328\u0327\u0001\u0000\u0000\u0000\u0329\u032a\u0001"+ - "\u0000\u0000\u0000\u032a\u0328\u0001\u0000\u0000\u0000\u032a\u032b\u0001"+ - "\u0000\u0000\u0000\u032b\u032c\u0001\u0000\u0000\u0000\u032c\u032d\u0003"+ - "R!\u0000\u032d\u032f\u0001\u0000\u0000\u0000\u032e\u0305\u0001\u0000\u0000"+ - "\u0000\u032e\u0310\u0001\u0000\u0000\u0000\u032e\u0317\u0001\u0000\u0000"+ - "\u0000\u032e\u0326\u0001\u0000\u0000\u0000\u032fc\u0001\u0000\u0000\u0000"+ - "\u0330\u0331\u0007\u001f\u0000\u0000\u0331\u0332\u0007 \u0000\u0000\u0332"+ - "e\u0001\u0000\u0000\u0000\u0333\u0334\u0007\f\u0000\u0000\u0334\u0335"+ - "\u0007\t\u0000\u0000\u0335\u0336\u0007\u0000\u0000\u0000\u0336g\u0001"+ - "\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339\u0007\u0002"+ - "\u0000\u0000\u0339\u033a\u0007\u0004\u0000\u0000\u033ai\u0001\u0000\u0000"+ - "\u0000\u033b\u033c\u0005=\u0000\u0000\u033ck\u0001\u0000\u0000\u0000\u033d"+ - "\u033e\u0005:\u0000\u0000\u033e\u033f\u0005:\u0000\u0000\u033fm\u0001"+ - "\u0000\u0000\u0000\u0340\u0341\u0005:\u0000\u0000\u0341o\u0001\u0000\u0000"+ - "\u0000\u0342\u0343\u0005,\u0000\u0000\u0343q\u0001\u0000\u0000\u0000\u0344"+ - "\u0345\u0007\u0000\u0000\u0000\u0345\u0346\u0007\u0003\u0000\u0000\u0346"+ - "\u0347\u0007\u0002\u0000\u0000\u0347\u0348\u0007\u0004\u0000\u0000\u0348"+ - "s\u0001\u0000\u0000\u0000\u0349\u034a\u0005.\u0000\u0000\u034au\u0001"+ - "\u0000\u0000\u0000\u034b\u034c\u0007\u000f\u0000\u0000\u034c\u034d\u0007"+ - "\f\u0000\u0000\u034d\u034e\u0007\r\u0000\u0000\u034e\u034f\u0007\u0002"+ - "\u0000\u0000\u034f\u0350\u0007\u0003\u0000\u0000\u0350w\u0001\u0000\u0000"+ - "\u0000\u0351\u0352\u0007\u000f\u0000\u0000\u0352\u0353\u0007\u0001\u0000"+ - 
"\u0000\u0353\u0354\u0007\u0006\u0000\u0000\u0354\u0355\u0007\u0002\u0000"+ - "\u0000\u0355\u0356\u0007\u0005\u0000\u0000\u0356y\u0001\u0000\u0000\u0000"+ - "\u0357\u0358\u0007\u0001\u0000\u0000\u0358\u0359\u0007\t\u0000\u0000\u0359"+ - "{\u0001\u0000\u0000\u0000\u035a\u035b\u0007\u0001\u0000\u0000\u035b\u035c"+ - "\u0007\u0002\u0000\u0000\u035c}\u0001\u0000\u0000\u0000\u035d\u035e\u0007"+ - "\r\u0000\u0000\u035e\u035f\u0007\f\u0000\u0000\u035f\u0360\u0007\u0002"+ - "\u0000\u0000\u0360\u0361\u0007\u0005\u0000\u0000\u0361\u007f\u0001\u0000"+ - "\u0000\u0000\u0362\u0363\u0007\r\u0000\u0000\u0363\u0364\u0007\u0001\u0000"+ - "\u0000\u0364\u0365\u0007\u0012\u0000\u0000\u0365\u0366\u0007\u0003\u0000"+ - "\u0000\u0366\u0081\u0001\u0000\u0000\u0000\u0367\u0368\u0005(\u0000\u0000"+ - "\u0368\u0083\u0001\u0000\u0000\u0000\u0369\u036a\u0007\t\u0000\u0000\u036a"+ - "\u036b\u0007\u0007\u0000\u0000\u036b\u036c\u0007\u0005\u0000\u0000\u036c"+ - "\u0085\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e\u036f"+ - "\u0007\u0014\u0000\u0000\u036f\u0370\u0007\r\u0000\u0000\u0370\u0371\u0007"+ - "\r\u0000\u0000\u0371\u0087\u0001\u0000\u0000\u0000\u0372\u0373\u0007\t"+ - "\u0000\u0000\u0373\u0374\u0007\u0014\u0000\u0000\u0374\u0375\u0007\r\u0000"+ - "\u0000\u0375\u0376\u0007\r\u0000\u0000\u0376\u0377\u0007\u0002\u0000\u0000"+ - "\u0377\u0089\u0001\u0000\u0000\u0000\u0378\u0379\u0007\u0007\u0000\u0000"+ - "\u0379\u037a\u0007\u0006\u0000\u0000\u037a\u008b\u0001\u0000\u0000\u0000"+ - "\u037b\u037c\u0005?\u0000\u0000\u037c\u008d\u0001\u0000\u0000\u0000\u037d"+ - "\u037e\u0007\u0006\u0000\u0000\u037e\u037f\u0007\r\u0000\u0000\u037f\u0380"+ - "\u0007\u0001\u0000\u0000\u0380\u0381\u0007\u0012\u0000\u0000\u0381\u0382"+ - "\u0007\u0003\u0000\u0000\u0382\u008f\u0001\u0000\u0000\u0000\u0383\u0384"+ - "\u0005)\u0000\u0000\u0384\u0091\u0001\u0000\u0000\u0000\u0385\u0386\u0007"+ - "\u0005\u0000\u0000\u0386\u0387\u0007\u0006\u0000\u0000\u0387\u0388\u0007"+ - 
"\u0014\u0000\u0000\u0388\u0389\u0007\u0003\u0000\u0000\u0389\u0093\u0001"+ - "\u0000\u0000\u0000\u038a\u038b\u0005=\u0000\u0000\u038b\u038c\u0005=\u0000"+ - "\u0000\u038c\u0095\u0001\u0000\u0000\u0000\u038d\u038e\u0005=\u0000\u0000"+ - "\u038e\u038f\u0005~\u0000\u0000\u038f\u0097\u0001\u0000\u0000\u0000\u0390"+ - "\u0391\u0005!\u0000\u0000\u0391\u0392\u0005=\u0000\u0000\u0392\u0099\u0001"+ - "\u0000\u0000\u0000\u0393\u0394\u0005<\u0000\u0000\u0394\u009b\u0001\u0000"+ - "\u0000\u0000\u0395\u0396\u0005<\u0000\u0000\u0396\u0397\u0005=\u0000\u0000"+ - "\u0397\u009d\u0001\u0000\u0000\u0000\u0398\u0399\u0005>\u0000\u0000\u0399"+ - "\u009f\u0001\u0000\u0000\u0000\u039a\u039b\u0005>\u0000\u0000\u039b\u039c"+ - "\u0005=\u0000\u0000\u039c\u00a1\u0001\u0000\u0000\u0000\u039d\u039e\u0005"+ - "+\u0000\u0000\u039e\u00a3\u0001\u0000\u0000\u0000\u039f\u03a0\u0005-\u0000"+ - "\u0000\u03a0\u00a5\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005*\u0000\u0000"+ - "\u03a2\u00a7\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005/\u0000\u0000\u03a4"+ - "\u00a9\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005%\u0000\u0000\u03a6\u00ab"+ - "\u0001\u0000\u0000\u0000\u03a7\u03a8\u0003.\u000f\u0000\u03a8\u03a9\u0001"+ - "\u0000\u0000\u0000\u03a9\u03aa\u0006N\r\u0000\u03aa\u00ad\u0001\u0000"+ - "\u0000\u0000\u03ab\u03ae\u0003\u008c>\u0000\u03ac\u03af\u0003L\u001e\u0000"+ - "\u03ad\u03af\u0003Z%\u0000\u03ae\u03ac\u0001\u0000\u0000\u0000\u03ae\u03ad"+ - "\u0001\u0000\u0000\u0000\u03af\u03b3\u0001\u0000\u0000\u0000\u03b0\u03b2"+ - "\u0003\\&\u0000\u03b1\u03b0\u0001\u0000\u0000\u0000\u03b2\u03b5\u0001"+ - "\u0000\u0000\u0000\u03b3\u03b1\u0001\u0000\u0000\u0000\u03b3\u03b4\u0001"+ - "\u0000\u0000\u0000\u03b4\u03bd\u0001\u0000\u0000\u0000\u03b5\u03b3\u0001"+ - "\u0000\u0000\u0000\u03b6\u03b8\u0003\u008c>\u0000\u03b7\u03b9\u0003J\u001d"+ - "\u0000\u03b8\u03b7\u0001\u0000\u0000\u0000\u03b9\u03ba\u0001\u0000\u0000"+ - "\u0000\u03ba\u03b8\u0001\u0000\u0000\u0000\u03ba\u03bb\u0001\u0000\u0000"+ - 
"\u0000\u03bb\u03bd\u0001\u0000\u0000\u0000\u03bc\u03ab\u0001\u0000\u0000"+ - "\u0000\u03bc\u03b6\u0001\u0000\u0000\u0000\u03bd\u00af\u0001\u0000\u0000"+ - "\u0000\u03be\u03bf\u0005[\u0000\u0000\u03bf\u03c0\u0001\u0000\u0000\u0000"+ - "\u03c0\u03c1\u0006P\u0000\u0000\u03c1\u03c2\u0006P\u0000\u0000\u03c2\u00b1"+ - "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0005]\u0000\u0000\u03c4\u03c5\u0001"+ - "\u0000\u0000\u0000\u03c5\u03c6\u0006Q\f\u0000\u03c6\u03c7\u0006Q\f\u0000"+ - "\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03cc\u0003L\u001e\u0000\u03c9"+ - "\u03cb\u0003\\&\u0000\u03ca\u03c9\u0001\u0000\u0000\u0000\u03cb\u03ce"+ - "\u0001\u0000\u0000\u0000\u03cc\u03ca\u0001\u0000\u0000\u0000\u03cc\u03cd"+ - "\u0001\u0000\u0000\u0000\u03cd\u03d9\u0001\u0000\u0000\u0000\u03ce\u03cc"+ - "\u0001\u0000\u0000\u0000\u03cf\u03d2\u0003Z%\u0000\u03d0\u03d2\u0003T"+ - "\"\u0000\u03d1\u03cf\u0001\u0000\u0000\u0000\u03d1\u03d0\u0001\u0000\u0000"+ - "\u0000\u03d2\u03d4\u0001\u0000\u0000\u0000\u03d3\u03d5\u0003\\&\u0000"+ - "\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d6\u0001\u0000\u0000\u0000"+ - "\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000\u0000"+ - "\u03d7\u03d9\u0001\u0000\u0000\u0000\u03d8\u03c8\u0001\u0000\u0000\u0000"+ - "\u03d8\u03d1\u0001\u0000\u0000\u0000\u03d9\u00b5\u0001\u0000\u0000\u0000"+ - "\u03da\u03dc\u0003V#\u0000\u03db\u03dd\u0003X$\u0000\u03dc\u03db\u0001"+ - "\u0000\u0000\u0000\u03dd\u03de\u0001\u0000\u0000\u0000\u03de\u03dc\u0001"+ - "\u0000\u0000\u0000\u03de\u03df\u0001\u0000\u0000\u0000\u03df\u03e0\u0001"+ - "\u0000\u0000\u0000\u03e0\u03e1\u0003V#\u0000\u03e1\u00b7\u0001\u0000\u0000"+ - "\u0000\u03e2\u03e3\u0003\u00b6S\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000"+ - "\u03e4\u03e5\u0003B\u0019\u0000\u03e5\u03e6\u0001\u0000\u0000\u0000\u03e6"+ - "\u03e7\u0006U\u000b\u0000\u03e7\u00bb\u0001\u0000\u0000\u0000\u03e8\u03e9"+ - "\u0003D\u001a\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000\u03ea\u03eb\u0006"+ - 
"V\u000b\u0000\u03eb\u00bd\u0001\u0000\u0000\u0000\u03ec\u03ed\u0003F\u001b"+ - "\u0000\u03ed\u03ee\u0001\u0000\u0000\u0000\u03ee\u03ef\u0006W\u000b\u0000"+ - "\u03ef\u00bf\u0001\u0000\u0000\u0000\u03f0\u03f1\u0003\u00b0P\u0000\u03f1"+ - "\u03f2\u0001\u0000\u0000\u0000\u03f2\u03f3\u0006X\u000e\u0000\u03f3\u03f4"+ - "\u0006X\u000f\u0000\u03f4\u00c1\u0001\u0000\u0000\u0000\u03f5\u03f6\u0003"+ - "H\u001c\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7\u03f8\u0006Y\u0010"+ - "\u0000\u03f8\u03f9\u0006Y\f\u0000\u03f9\u00c3\u0001\u0000\u0000\u0000"+ - "\u03fa\u03fb\u0003F\u001b\u0000\u03fb\u03fc\u0001\u0000\u0000\u0000\u03fc"+ - "\u03fd\u0006Z\u000b\u0000\u03fd\u00c5\u0001\u0000\u0000\u0000\u03fe\u03ff"+ - "\u0003B\u0019\u0000\u03ff\u0400\u0001\u0000\u0000\u0000\u0400\u0401\u0006"+ - "[\u000b\u0000\u0401\u00c7\u0001\u0000\u0000\u0000\u0402\u0403\u0003D\u001a"+ - "\u0000\u0403\u0404\u0001\u0000\u0000\u0000\u0404\u0405\u0006\\\u000b\u0000"+ - "\u0405\u00c9\u0001\u0000\u0000\u0000\u0406\u0407\u0003H\u001c\u0000\u0407"+ - "\u0408\u0001\u0000\u0000\u0000\u0408\u0409\u0006]\u0010\u0000\u0409\u040a"+ - "\u0006]\f\u0000\u040a\u00cb\u0001\u0000\u0000\u0000\u040b\u040c\u0003"+ - "\u00b0P\u0000\u040c\u040d\u0001\u0000\u0000\u0000\u040d\u040e\u0006^\u000e"+ - "\u0000\u040e\u00cd\u0001\u0000\u0000\u0000\u040f\u0410\u0003\u00b2Q\u0000"+ - "\u0410\u0411\u0001\u0000\u0000\u0000\u0411\u0412\u0006_\u0011\u0000\u0412"+ - "\u00cf\u0001\u0000\u0000\u0000\u0413\u0414\u0003n/\u0000\u0414\u0415\u0001"+ - "\u0000\u0000\u0000\u0415\u0416\u0006`\u0012\u0000\u0416\u00d1\u0001\u0000"+ - "\u0000\u0000\u0417\u0418\u0003p0\u0000\u0418\u0419\u0001\u0000\u0000\u0000"+ - "\u0419\u041a\u0006a\u0013\u0000\u041a\u00d3\u0001\u0000\u0000\u0000\u041b"+ - "\u041c\u0003j-\u0000\u041c\u041d\u0001\u0000\u0000\u0000\u041d\u041e\u0006"+ - "b\u0014\u0000\u041e\u00d5\u0001\u0000\u0000\u0000\u041f\u0420\u0007\u0010"+ - "\u0000\u0000\u0420\u0421\u0007\u0003\u0000\u0000\u0421\u0422\u0007\u0005"+ - 
"\u0000\u0000\u0422\u0423\u0007\f\u0000\u0000\u0423\u0424\u0007\u0000\u0000"+ - "\u0000\u0424\u0425\u0007\f\u0000\u0000\u0425\u0426\u0007\u0005\u0000\u0000"+ - "\u0426\u0427\u0007\f\u0000\u0000\u0427\u00d7\u0001\u0000\u0000\u0000\u0428"+ - "\u042c\b!\u0000\u0000\u0429\u042a\u0005/\u0000\u0000\u042a\u042c\b\"\u0000"+ - "\u0000\u042b\u0428\u0001\u0000\u0000\u0000\u042b\u0429\u0001\u0000\u0000"+ - "\u0000\u042c\u00d9\u0001\u0000\u0000\u0000\u042d\u042f\u0003\u00d8d\u0000"+ - "\u042e\u042d\u0001\u0000\u0000\u0000\u042f\u0430\u0001\u0000\u0000\u0000"+ - "\u0430\u042e\u0001\u0000\u0000\u0000\u0430\u0431\u0001\u0000\u0000\u0000"+ - "\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0433\u0003\u00dae\u0000\u0433"+ - "\u0434\u0001\u0000\u0000\u0000\u0434\u0435\u0006f\u0015\u0000\u0435\u00dd"+ - "\u0001\u0000\u0000\u0000\u0436\u0437\u0003^\'\u0000\u0437\u0438\u0001"+ - "\u0000\u0000\u0000\u0438\u0439\u0006g\u0016\u0000\u0439\u00df\u0001\u0000"+ - "\u0000\u0000\u043a\u043b\u0003B\u0019\u0000\u043b\u043c\u0001\u0000\u0000"+ - "\u0000\u043c\u043d\u0006h\u000b\u0000\u043d\u00e1\u0001\u0000\u0000\u0000"+ - "\u043e\u043f\u0003D\u001a\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440"+ - "\u0441\u0006i\u000b\u0000\u0441\u00e3\u0001\u0000\u0000\u0000\u0442\u0443"+ - "\u0003F\u001b\u0000\u0443\u0444\u0001\u0000\u0000\u0000\u0444\u0445\u0006"+ - "j\u000b\u0000\u0445\u00e5\u0001\u0000\u0000\u0000\u0446\u0447\u0003H\u001c"+ - "\u0000\u0447\u0448\u0001\u0000\u0000\u0000\u0448\u0449\u0006k\u0010\u0000"+ - "\u0449\u044a\u0006k\f\u0000\u044a\u00e7\u0001\u0000\u0000\u0000\u044b"+ - "\u044c\u0003t2\u0000\u044c\u044d\u0001\u0000\u0000\u0000\u044d\u044e\u0006"+ - "l\u0017\u0000\u044e\u00e9\u0001\u0000\u0000\u0000\u044f\u0450\u0003p0"+ - "\u0000\u0450\u0451\u0001\u0000\u0000\u0000\u0451\u0452\u0006m\u0013\u0000"+ - "\u0452\u00eb\u0001\u0000\u0000\u0000\u0453\u0454\u0004n\b\u0000\u0454"+ - "\u0455\u0003\u008c>\u0000\u0455\u0456\u0001\u0000\u0000\u0000\u0456\u0457"+ - 
"\u0006n\u0018\u0000\u0457\u00ed\u0001\u0000\u0000\u0000\u0458\u0459\u0004"+ - "o\t\u0000\u0459\u045a\u0003\u00aeO\u0000\u045a\u045b\u0001\u0000\u0000"+ - "\u0000\u045b\u045c\u0006o\u0019\u0000\u045c\u00ef\u0001\u0000\u0000\u0000"+ - "\u045d\u0462\u0003L\u001e\u0000\u045e\u0462\u0003J\u001d\u0000\u045f\u0462"+ - "\u0003Z%\u0000\u0460\u0462\u0003\u00a6K\u0000\u0461\u045d\u0001\u0000"+ - "\u0000\u0000\u0461\u045e\u0001\u0000\u0000\u0000\u0461\u045f\u0001\u0000"+ - "\u0000\u0000\u0461\u0460\u0001\u0000\u0000\u0000\u0462\u00f1\u0001\u0000"+ - "\u0000\u0000\u0463\u0466\u0003L\u001e\u0000\u0464\u0466\u0003\u00a6K\u0000"+ - "\u0465\u0463\u0001\u0000\u0000\u0000\u0465\u0464\u0001\u0000\u0000\u0000"+ - "\u0466\u046a\u0001\u0000\u0000\u0000\u0467\u0469\u0003\u00f0p\u0000\u0468"+ - "\u0467\u0001\u0000\u0000\u0000\u0469\u046c\u0001\u0000\u0000\u0000\u046a"+ - "\u0468\u0001\u0000\u0000\u0000\u046a\u046b\u0001\u0000\u0000\u0000\u046b"+ - "\u0477\u0001\u0000\u0000\u0000\u046c\u046a\u0001\u0000\u0000\u0000\u046d"+ - "\u0470\u0003Z%\u0000\u046e\u0470\u0003T\"\u0000\u046f\u046d\u0001\u0000"+ - "\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470\u0472\u0001\u0000"+ - "\u0000\u0000\u0471\u0473\u0003\u00f0p\u0000\u0472\u0471\u0001\u0000\u0000"+ - "\u0000\u0473\u0474\u0001\u0000\u0000\u0000\u0474\u0472\u0001\u0000\u0000"+ - "\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0477\u0001\u0000\u0000"+ - "\u0000\u0476\u0465\u0001\u0000\u0000\u0000\u0476\u046f\u0001\u0000\u0000"+ - "\u0000\u0477\u00f3\u0001\u0000\u0000\u0000\u0478\u047b\u0003\u00f2q\u0000"+ - "\u0479\u047b\u0003\u00b6S\u0000\u047a\u0478\u0001\u0000\u0000\u0000\u047a"+ - "\u0479\u0001\u0000\u0000\u0000\u047b\u047c\u0001\u0000\u0000\u0000\u047c"+ - "\u047a\u0001\u0000\u0000\u0000\u047c\u047d\u0001\u0000\u0000\u0000\u047d"+ - "\u00f5\u0001\u0000\u0000\u0000\u047e\u047f\u0003B\u0019\u0000\u047f\u0480"+ - "\u0001\u0000\u0000\u0000\u0480\u0481\u0006s\u000b\u0000\u0481\u00f7\u0001"+ - 
"\u0000\u0000\u0000\u0482\u0483\u0003D\u001a\u0000\u0483\u0484\u0001\u0000"+ - "\u0000\u0000\u0484\u0485\u0006t\u000b\u0000\u0485\u00f9\u0001\u0000\u0000"+ - "\u0000\u0486\u0487\u0003F\u001b\u0000\u0487\u0488\u0001\u0000\u0000\u0000"+ - "\u0488\u0489\u0006u\u000b\u0000\u0489\u00fb\u0001\u0000\u0000\u0000\u048a"+ - "\u048b\u0003H\u001c\u0000\u048b\u048c\u0001\u0000\u0000\u0000\u048c\u048d"+ - "\u0006v\u0010\u0000\u048d\u048e\u0006v\f\u0000\u048e\u00fd\u0001\u0000"+ - "\u0000\u0000\u048f\u0490\u0003j-\u0000\u0490\u0491\u0001\u0000\u0000\u0000"+ - "\u0491\u0492\u0006w\u0014\u0000\u0492\u00ff\u0001\u0000\u0000\u0000\u0493"+ - "\u0494\u0003p0\u0000\u0494\u0495\u0001\u0000\u0000\u0000\u0495\u0496\u0006"+ - "x\u0013\u0000\u0496\u0101\u0001\u0000\u0000\u0000\u0497\u0498\u0003t2"+ - "\u0000\u0498\u0499\u0001\u0000\u0000\u0000\u0499\u049a\u0006y\u0017\u0000"+ - "\u049a\u0103\u0001\u0000\u0000\u0000\u049b\u049c\u0004z\n\u0000\u049c"+ - "\u049d\u0003\u008c>\u0000\u049d\u049e\u0001\u0000\u0000\u0000\u049e\u049f"+ - "\u0006z\u0018\u0000\u049f\u0105\u0001\u0000\u0000\u0000\u04a0\u04a1\u0004"+ - "{\u000b\u0000\u04a1\u04a2\u0003\u00aeO\u0000\u04a2\u04a3\u0001\u0000\u0000"+ - "\u0000\u04a3\u04a4\u0006{\u0019\u0000\u04a4\u0107\u0001\u0000\u0000\u0000"+ - "\u04a5\u04a6\u0007\f\u0000\u0000\u04a6\u04a7\u0007\u0002\u0000\u0000\u04a7"+ - "\u0109\u0001\u0000\u0000\u0000\u04a8\u04a9\u0003\u00f4r\u0000\u04a9\u04aa"+ - "\u0001\u0000\u0000\u0000\u04aa\u04ab\u0006}\u001a\u0000\u04ab\u010b\u0001"+ - "\u0000\u0000\u0000\u04ac\u04ad\u0003B\u0019\u0000\u04ad\u04ae\u0001\u0000"+ - "\u0000\u0000\u04ae\u04af\u0006~\u000b\u0000\u04af\u010d\u0001\u0000\u0000"+ - "\u0000\u04b0\u04b1\u0003D\u001a\u0000\u04b1\u04b2\u0001\u0000\u0000\u0000"+ - "\u04b2\u04b3\u0006\u007f\u000b\u0000\u04b3\u010f\u0001\u0000\u0000\u0000"+ - "\u04b4\u04b5\u0003F\u001b\u0000\u04b5\u04b6\u0001\u0000\u0000\u0000\u04b6"+ - "\u04b7\u0006\u0080\u000b\u0000\u04b7\u0111\u0001\u0000\u0000\u0000\u04b8"+ - 
"\u04b9\u0003H\u001c\u0000\u04b9\u04ba\u0001\u0000\u0000\u0000\u04ba\u04bb"+ - "\u0006\u0081\u0010\u0000\u04bb\u04bc\u0006\u0081\f\u0000\u04bc\u0113\u0001"+ - "\u0000\u0000\u0000\u04bd\u04be\u0003\u00b0P\u0000\u04be\u04bf\u0001\u0000"+ - "\u0000\u0000\u04bf\u04c0\u0006\u0082\u000e\u0000\u04c0\u04c1\u0006\u0082"+ - "\u001b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000\u04c2\u04c3\u0007\u0007"+ - "\u0000\u0000\u04c3\u04c4\u0007\t\u0000\u0000\u04c4\u04c5\u0001\u0000\u0000"+ - "\u0000\u04c5\u04c6\u0006\u0083\u001c\u0000\u04c6\u0117\u0001\u0000\u0000"+ - "\u0000\u04c7\u04c8\u0007\u0013\u0000\u0000\u04c8\u04c9\u0007\u0001\u0000"+ - "\u0000\u04c9\u04ca\u0007\u0005\u0000\u0000\u04ca\u04cb\u0007\n\u0000\u0000"+ - "\u04cb\u04cc\u0001\u0000\u0000\u0000\u04cc\u04cd\u0006\u0084\u001c\u0000"+ - "\u04cd\u0119\u0001\u0000\u0000\u0000\u04ce\u04cf\b#\u0000\u0000\u04cf"+ - "\u011b\u0001\u0000\u0000\u0000\u04d0\u04d2\u0003\u011a\u0085\u0000\u04d1"+ - "\u04d0\u0001\u0000\u0000\u0000\u04d2\u04d3\u0001\u0000\u0000\u0000\u04d3"+ - "\u04d1\u0001\u0000\u0000\u0000\u04d3\u04d4\u0001\u0000\u0000\u0000\u04d4"+ - "\u04d5\u0001\u0000\u0000\u0000\u04d5\u04d6\u0003n/\u0000\u04d6\u04d8\u0001"+ - "\u0000\u0000\u0000\u04d7\u04d1\u0001\u0000\u0000\u0000\u04d7\u04d8\u0001"+ - "\u0000\u0000\u0000\u04d8\u04da\u0001\u0000\u0000\u0000\u04d9\u04db\u0003"+ - "\u011a\u0085\u0000\u04da\u04d9\u0001\u0000\u0000\u0000\u04db\u04dc\u0001"+ - "\u0000\u0000\u0000\u04dc\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001"+ - "\u0000\u0000\u0000\u04dd\u011d\u0001\u0000\u0000\u0000\u04de\u04df\u0003"+ - "\u011c\u0086\u0000\u04df\u04e0\u0001\u0000\u0000\u0000\u04e0\u04e1\u0006"+ - "\u0087\u001d\u0000\u04e1\u011f\u0001\u0000\u0000\u0000\u04e2\u04e3\u0003"+ - "B\u0019\u0000\u04e3\u04e4\u0001\u0000\u0000\u0000\u04e4\u04e5\u0006\u0088"+ - "\u000b\u0000\u04e5\u0121\u0001\u0000\u0000\u0000\u04e6\u04e7\u0003D\u001a"+ - "\u0000\u04e7\u04e8\u0001\u0000\u0000\u0000\u04e8\u04e9\u0006\u0089\u000b"+ - 
"\u0000\u04e9\u0123\u0001\u0000\u0000\u0000\u04ea\u04eb\u0003F\u001b\u0000"+ - "\u04eb\u04ec\u0001\u0000\u0000\u0000\u04ec\u04ed\u0006\u008a\u000b\u0000"+ - "\u04ed\u0125\u0001\u0000\u0000\u0000\u04ee\u04ef\u0003H\u001c\u0000\u04ef"+ - "\u04f0\u0001\u0000\u0000\u0000\u04f0\u04f1\u0006\u008b\u0010\u0000\u04f1"+ - "\u04f2\u0006\u008b\f\u0000\u04f2\u04f3\u0006\u008b\f\u0000\u04f3\u0127"+ - "\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003j-\u0000\u04f5\u04f6\u0001\u0000"+ - "\u0000\u0000\u04f6\u04f7\u0006\u008c\u0014\u0000\u04f7\u0129\u0001\u0000"+ - "\u0000\u0000\u04f8\u04f9\u0003p0\u0000\u04f9\u04fa\u0001\u0000\u0000\u0000"+ - "\u04fa\u04fb\u0006\u008d\u0013\u0000\u04fb\u012b\u0001\u0000\u0000\u0000"+ - "\u04fc\u04fd\u0003t2\u0000\u04fd\u04fe\u0001\u0000\u0000\u0000\u04fe\u04ff"+ - "\u0006\u008e\u0017\u0000\u04ff\u012d\u0001\u0000\u0000\u0000\u0500\u0501"+ - "\u0003\u0118\u0084\u0000\u0501\u0502\u0001\u0000\u0000\u0000\u0502\u0503"+ - "\u0006\u008f\u001e\u0000\u0503\u012f\u0001\u0000\u0000\u0000\u0504\u0505"+ - "\u0003\u00f4r\u0000\u0505\u0506\u0001\u0000\u0000\u0000\u0506\u0507\u0006"+ - "\u0090\u001a\u0000\u0507\u0131\u0001\u0000\u0000\u0000\u0508\u0509\u0003"+ - "\u00b8T\u0000\u0509\u050a\u0001\u0000\u0000\u0000\u050a\u050b\u0006\u0091"+ - "\u001f\u0000\u050b\u0133\u0001\u0000\u0000\u0000\u050c\u050d\u0004\u0092"+ - "\f\u0000\u050d\u050e\u0003\u008c>\u0000\u050e\u050f\u0001\u0000\u0000"+ - "\u0000\u050f\u0510\u0006\u0092\u0018\u0000\u0510\u0135\u0001\u0000\u0000"+ - "\u0000\u0511\u0512\u0004\u0093\r\u0000\u0512\u0513\u0003\u00aeO\u0000"+ - "\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006\u0093\u0019\u0000"+ - "\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0003B\u0019\u0000\u0517"+ - "\u0518\u0001\u0000\u0000\u0000\u0518\u0519\u0006\u0094\u000b\u0000\u0519"+ - "\u0139\u0001\u0000\u0000\u0000\u051a\u051b\u0003D\u001a\u0000\u051b\u051c"+ - "\u0001\u0000\u0000\u0000\u051c\u051d\u0006\u0095\u000b\u0000\u051d\u013b"+ - 
"\u0001\u0000\u0000\u0000\u051e\u051f\u0003F\u001b\u0000\u051f\u0520\u0001"+ - "\u0000\u0000\u0000\u0520\u0521\u0006\u0096\u000b\u0000\u0521\u013d\u0001"+ - "\u0000\u0000\u0000\u0522\u0523\u0003H\u001c\u0000\u0523\u0524\u0001\u0000"+ - "\u0000\u0000\u0524\u0525\u0006\u0097\u0010\u0000\u0525\u0526\u0006\u0097"+ - "\f\u0000\u0526\u013f\u0001\u0000\u0000\u0000\u0527\u0528\u0003t2\u0000"+ - "\u0528\u0529\u0001\u0000\u0000\u0000\u0529\u052a\u0006\u0098\u0017\u0000"+ - "\u052a\u0141\u0001\u0000\u0000\u0000\u052b\u052c\u0004\u0099\u000e\u0000"+ - "\u052c\u052d\u0003\u008c>\u0000\u052d\u052e\u0001\u0000\u0000\u0000\u052e"+ - "\u052f\u0006\u0099\u0018\u0000\u052f\u0143\u0001\u0000\u0000\u0000\u0530"+ - "\u0531\u0004\u009a\u000f\u0000\u0531\u0532\u0003\u00aeO\u0000\u0532\u0533"+ - "\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0019\u0000\u0534\u0145"+ - "\u0001\u0000\u0000\u0000\u0535\u0536\u0003\u00b8T\u0000\u0536\u0537\u0001"+ - "\u0000\u0000\u0000\u0537\u0538\u0006\u009b\u001f\u0000\u0538\u0147\u0001"+ - "\u0000\u0000\u0000\u0539\u053a\u0003\u00b4R\u0000\u053a\u053b\u0001\u0000"+ - "\u0000\u0000\u053b\u053c\u0006\u009c \u0000\u053c\u0149\u0001\u0000\u0000"+ - "\u0000\u053d\u053e\u0003B\u0019\u0000\u053e\u053f\u0001\u0000\u0000\u0000"+ - "\u053f\u0540\u0006\u009d\u000b\u0000\u0540\u014b\u0001\u0000\u0000\u0000"+ - "\u0541\u0542\u0003D\u001a\u0000\u0542\u0543\u0001\u0000\u0000\u0000\u0543"+ - "\u0544\u0006\u009e\u000b\u0000\u0544\u014d\u0001\u0000\u0000\u0000\u0545"+ - "\u0546\u0003F\u001b\u0000\u0546\u0547\u0001\u0000\u0000\u0000\u0547\u0548"+ - "\u0006\u009f\u000b\u0000\u0548\u014f\u0001\u0000\u0000\u0000\u0549\u054a"+ - "\u0003H\u001c\u0000\u054a\u054b\u0001\u0000\u0000\u0000\u054b\u054c\u0006"+ - "\u00a0\u0010\u0000\u054c\u054d\u0006\u00a0\f\u0000\u054d\u0151\u0001\u0000"+ - "\u0000\u0000\u054e\u054f\u0007\u0001\u0000\u0000\u054f\u0550\u0007\t\u0000"+ - "\u0000\u0550\u0551\u0007\u000f\u0000\u0000\u0551\u0552\u0007\u0007\u0000"+ - 
"\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553\u0554\u0003B\u0019\u0000"+ - "\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556\u0006\u00a2\u000b\u0000"+ - "\u0556\u0155\u0001\u0000\u0000\u0000\u0557\u0558\u0003D\u001a\u0000\u0558"+ - "\u0559\u0001\u0000\u0000\u0000\u0559\u055a\u0006\u00a3\u000b\u0000\u055a"+ - "\u0157\u0001\u0000\u0000\u0000\u055b\u055c\u0003F\u001b\u0000\u055c\u055d"+ - "\u0001\u0000\u0000\u0000\u055d\u055e\u0006\u00a4\u000b\u0000\u055e\u0159"+ - "\u0001\u0000\u0000\u0000\u055f\u0560\u0003\u00b2Q\u0000\u0560\u0561\u0001"+ - "\u0000\u0000\u0000\u0561\u0562\u0006\u00a5\u0011\u0000\u0562\u0563\u0006"+ - "\u00a5\f\u0000\u0563\u015b\u0001\u0000\u0000\u0000\u0564\u0565\u0003n"+ - "/\u0000\u0565\u0566\u0001\u0000\u0000\u0000\u0566\u0567\u0006\u00a6\u0012"+ - "\u0000\u0567\u015d\u0001\u0000\u0000\u0000\u0568\u056e\u0003T\"\u0000"+ - "\u0569\u056e\u0003J\u001d\u0000\u056a\u056e\u0003t2\u0000\u056b\u056e"+ - "\u0003L\u001e\u0000\u056c\u056e\u0003Z%\u0000\u056d\u0568\u0001\u0000"+ - "\u0000\u0000\u056d\u0569\u0001\u0000\u0000\u0000\u056d\u056a\u0001\u0000"+ - "\u0000\u0000\u056d\u056b\u0001\u0000\u0000\u0000\u056d\u056c\u0001\u0000"+ - "\u0000\u0000\u056e\u056f\u0001\u0000\u0000\u0000\u056f\u056d\u0001\u0000"+ - "\u0000\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u015f\u0001\u0000"+ - "\u0000\u0000\u0571\u0572\u0003B\u0019\u0000\u0572\u0573\u0001\u0000\u0000"+ - "\u0000\u0573\u0574\u0006\u00a8\u000b\u0000\u0574\u0161\u0001\u0000\u0000"+ - "\u0000\u0575\u0576\u0003D\u001a\u0000\u0576\u0577\u0001\u0000\u0000\u0000"+ - "\u0577\u0578\u0006\u00a9\u000b\u0000\u0578\u0163\u0001\u0000\u0000\u0000"+ - "\u0579\u057a\u0003F\u001b\u0000\u057a\u057b\u0001\u0000\u0000\u0000\u057b"+ - "\u057c\u0006\u00aa\u000b\u0000\u057c\u0165\u0001\u0000\u0000\u0000\u057d"+ - "\u057e\u0003H\u001c\u0000\u057e\u057f\u0001\u0000\u0000\u0000\u057f\u0580"+ - "\u0006\u00ab\u0010\u0000\u0580\u0581\u0006\u00ab\f\u0000\u0581\u0167\u0001"+ - 
"\u0000\u0000\u0000\u0582\u0583\u0003n/\u0000\u0583\u0584\u0001\u0000\u0000"+ - "\u0000\u0584\u0585\u0006\u00ac\u0012\u0000\u0585\u0169\u0001\u0000\u0000"+ - "\u0000\u0586\u0587\u0003p0\u0000\u0587\u0588\u0001\u0000\u0000\u0000\u0588"+ - "\u0589\u0006\u00ad\u0013\u0000\u0589\u016b\u0001\u0000\u0000\u0000\u058a"+ - "\u058b\u0003t2\u0000\u058b\u058c\u0001\u0000\u0000\u0000\u058c\u058d\u0006"+ - "\u00ae\u0017\u0000\u058d\u016d\u0001\u0000\u0000\u0000\u058e\u058f\u0003"+ - "\u0116\u0083\u0000\u058f\u0590\u0001\u0000\u0000\u0000\u0590\u0591\u0006"+ - "\u00af!\u0000\u0591\u0592\u0006\u00af\"\u0000\u0592\u016f\u0001\u0000"+ - "\u0000\u0000\u0593\u0594\u0003\u00dae\u0000\u0594\u0595\u0001\u0000\u0000"+ - "\u0000\u0595\u0596\u0006\u00b0\u0015\u0000\u0596\u0171\u0001\u0000\u0000"+ - "\u0000\u0597\u0598\u0003^\'\u0000\u0598\u0599\u0001\u0000\u0000\u0000"+ - "\u0599\u059a\u0006\u00b1\u0016\u0000\u059a\u0173\u0001\u0000\u0000\u0000"+ - "\u059b\u059c\u0003B\u0019\u0000\u059c\u059d\u0001\u0000\u0000\u0000\u059d"+ - "\u059e\u0006\u00b2\u000b\u0000\u059e\u0175\u0001\u0000\u0000\u0000\u059f"+ - "\u05a0\u0003D\u001a\u0000\u05a0\u05a1\u0001\u0000\u0000\u0000\u05a1\u05a2"+ - "\u0006\u00b3\u000b\u0000\u05a2\u0177\u0001\u0000\u0000\u0000\u05a3\u05a4"+ - "\u0003F\u001b\u0000\u05a4\u05a5\u0001\u0000\u0000\u0000\u05a5\u05a6\u0006"+ - "\u00b4\u000b\u0000\u05a6\u0179\u0001\u0000\u0000\u0000\u05a7\u05a8\u0003"+ - "H\u001c\u0000\u05a8\u05a9\u0001\u0000\u0000\u0000\u05a9\u05aa\u0006\u00b5"+ - "\u0010\u0000\u05aa\u05ab\u0006\u00b5\f\u0000\u05ab\u05ac\u0006\u00b5\f"+ - "\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae\u0003p0\u0000\u05ae"+ - "\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006\u00b6\u0013\u0000\u05b0"+ - "\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003t2\u0000\u05b2\u05b3\u0001"+ - "\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7\u0017\u0000\u05b4\u017f\u0001"+ - "\u0000\u0000\u0000\u05b5\u05b6\u0003\u00f4r\u0000\u05b6\u05b7\u0001\u0000"+ - 
"\u0000\u0000\u05b7\u05b8\u0006\u00b8\u001a\u0000\u05b8\u0181\u0001\u0000"+ - "\u0000\u0000\u05b9\u05ba\u0003B\u0019\u0000\u05ba\u05bb\u0001\u0000\u0000"+ - "\u0000\u05bb\u05bc\u0006\u00b9\u000b\u0000\u05bc\u0183\u0001\u0000\u0000"+ - "\u0000\u05bd\u05be\u0003D\u001a\u0000\u05be\u05bf\u0001\u0000\u0000\u0000"+ - "\u05bf\u05c0\u0006\u00ba\u000b\u0000\u05c0\u0185\u0001\u0000\u0000\u0000"+ - "\u05c1\u05c2\u0003F\u001b\u0000\u05c2\u05c3\u0001\u0000\u0000\u0000\u05c3"+ - "\u05c4\u0006\u00bb\u000b\u0000\u05c4\u0187\u0001\u0000\u0000\u0000\u05c5"+ - "\u05c6\u0003H\u001c\u0000\u05c6\u05c7\u0001\u0000\u0000\u0000\u05c7\u05c8"+ - "\u0006\u00bc\u0010\u0000\u05c8\u05c9\u0006\u00bc\f\u0000\u05c9\u0189\u0001"+ - "\u0000\u0000\u0000\u05ca\u05cb\u00036\u0013\u0000\u05cb\u05cc\u0001\u0000"+ - "\u0000\u0000\u05cc\u05cd\u0006\u00bd#\u0000\u05cd\u018b\u0001\u0000\u0000"+ - "\u0000\u05ce\u05cf\u0003\u0108|\u0000\u05cf\u05d0\u0001\u0000\u0000\u0000"+ - "\u05d0\u05d1\u0006\u00be$\u0000\u05d1\u018d\u0001\u0000\u0000\u0000\u05d2"+ - "\u05d3\u0003\u0116\u0083\u0000\u05d3\u05d4\u0001\u0000\u0000\u0000\u05d4"+ - "\u05d5\u0006\u00bf!\u0000\u05d5\u05d6\u0006\u00bf\f\u0000\u05d6\u05d7"+ - "\u0006\u00bf\u0000\u0000\u05d7\u018f\u0001\u0000\u0000\u0000\u05d8\u05d9"+ - "\u0007\u0014\u0000\u0000\u05d9\u05da\u0007\u0002\u0000\u0000\u05da\u05db"+ - "\u0007\u0001\u0000\u0000\u05db\u05dc\u0007\t\u0000\u0000\u05dc\u05dd\u0007"+ - "\u0011\u0000\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de\u05df\u0006"+ - "\u00c0\f\u0000\u05df\u05e0\u0006\u00c0\u0000\u0000\u05e0\u0191\u0001\u0000"+ - "\u0000\u0000\u05e1\u05e2\u0003\u00b4R\u0000\u05e2\u05e3\u0001\u0000\u0000"+ - "\u0000\u05e3\u05e4\u0006\u00c1 \u0000\u05e4\u0193\u0001\u0000\u0000\u0000"+ - "\u05e5\u05e6\u0003\u00b8T\u0000\u05e6\u05e7\u0001\u0000\u0000\u0000\u05e7"+ - "\u05e8\u0006\u00c2\u001f\u0000\u05e8\u0195\u0001\u0000\u0000\u0000\u05e9"+ - "\u05ea\u0003B\u0019\u0000\u05ea\u05eb\u0001\u0000\u0000\u0000\u05eb\u05ec"+ - 
"\u0006\u00c3\u000b\u0000\u05ec\u0197\u0001\u0000\u0000\u0000\u05ed\u05ee"+ - "\u0003D\u001a\u0000\u05ee\u05ef\u0001\u0000\u0000\u0000\u05ef\u05f0\u0006"+ - "\u00c4\u000b\u0000\u05f0\u0199\u0001\u0000\u0000\u0000\u05f1\u05f2\u0003"+ - "F\u001b\u0000\u05f2\u05f3\u0001\u0000\u0000\u0000\u05f3\u05f4\u0006\u00c5"+ - "\u000b\u0000\u05f4\u019b\u0001\u0000\u0000\u0000\u05f5\u05f6\u0003H\u001c"+ - "\u0000\u05f6\u05f7\u0001\u0000\u0000\u0000\u05f7\u05f8\u0006\u00c6\u0010"+ - "\u0000\u05f8\u05f9\u0006\u00c6\f\u0000\u05f9\u019d\u0001\u0000\u0000\u0000"+ - "\u05fa\u05fb\u0003\u00dae\u0000\u05fb\u05fc\u0001\u0000\u0000\u0000\u05fc"+ - "\u05fd\u0006\u00c7\u0015\u0000\u05fd\u05fe\u0006\u00c7\f\u0000\u05fe\u05ff"+ - "\u0006\u00c7%\u0000\u05ff\u019f\u0001\u0000\u0000\u0000\u0600\u0601\u0003"+ - "^\'\u0000\u0601\u0602\u0001\u0000\u0000\u0000\u0602\u0603\u0006\u00c8"+ - "\u0016\u0000\u0603\u0604\u0006\u00c8\f\u0000\u0604\u0605\u0006\u00c8%"+ - "\u0000\u0605\u01a1\u0001\u0000\u0000\u0000\u0606\u0607\u0003B\u0019\u0000"+ - "\u0607\u0608\u0001\u0000\u0000\u0000\u0608\u0609\u0006\u00c9\u000b\u0000"+ - "\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003D\u001a\u0000\u060b"+ - "\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca\u000b\u0000\u060d"+ - "\u01a5\u0001\u0000\u0000\u0000\u060e\u060f\u0003F\u001b\u0000\u060f\u0610"+ - "\u0001\u0000\u0000\u0000\u0610\u0611\u0006\u00cb\u000b\u0000\u0611\u01a7"+ - "\u0001\u0000\u0000\u0000\u0612\u0613\u0003n/\u0000\u0613\u0614\u0001\u0000"+ - "\u0000\u0000\u0614\u0615\u0006\u00cc\u0012\u0000\u0615\u0616\u0006\u00cc"+ - "\f\u0000\u0616\u0617\u0006\u00cc\t\u0000\u0617\u01a9\u0001\u0000\u0000"+ - "\u0000\u0618\u0619\u0003p0\u0000\u0619\u061a\u0001\u0000\u0000\u0000\u061a"+ - "\u061b\u0006\u00cd\u0013\u0000\u061b\u061c\u0006\u00cd\f\u0000\u061c\u061d"+ - "\u0006\u00cd\t\u0000\u061d\u01ab\u0001\u0000\u0000\u0000\u061e\u061f\u0003"+ - "B\u0019\u0000\u061f\u0620\u0001\u0000\u0000\u0000\u0620\u0621\u0006\u00ce"+ - 
"\u000b\u0000\u0621\u01ad\u0001\u0000\u0000\u0000\u0622\u0623\u0003D\u001a"+ - "\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624\u0625\u0006\u00cf\u000b"+ - "\u0000\u0625\u01af\u0001\u0000\u0000\u0000\u0626\u0627\u0003F\u001b\u0000"+ - "\u0627\u0628\u0001\u0000\u0000\u0000\u0628\u0629\u0006\u00d0\u000b\u0000"+ - "\u0629\u01b1\u0001\u0000\u0000\u0000\u062a\u062b\u0003\u00b8T\u0000\u062b"+ - "\u062c\u0001\u0000\u0000\u0000\u062c\u062d\u0006\u00d1\f\u0000\u062d\u062e"+ - "\u0006\u00d1\u0000\u0000\u062e\u062f\u0006\u00d1\u001f\u0000\u062f\u01b3"+ - "\u0001\u0000\u0000\u0000\u0630\u0631\u0003\u00b4R\u0000\u0631\u0632\u0001"+ - "\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\f\u0000\u0633\u0634\u0006\u00d2"+ - "\u0000\u0000\u0634\u0635\u0006\u00d2 \u0000\u0635\u01b5\u0001\u0000\u0000"+ - "\u0000\u0636\u0637\u0003d*\u0000\u0637\u0638\u0001\u0000\u0000\u0000\u0638"+ - "\u0639\u0006\u00d3\f\u0000\u0639\u063a\u0006\u00d3\u0000\u0000\u063a\u063b"+ - "\u0006\u00d3&\u0000\u063b\u01b7\u0001\u0000\u0000\u0000\u063c\u063d\u0003"+ - "H\u001c\u0000\u063d\u063e\u0001\u0000\u0000\u0000\u063e\u063f\u0006\u00d4"+ - "\u0010\u0000\u063f\u0640\u0006\u00d4\f\u0000\u0640\u01b9\u0001\u0000\u0000"+ + "\u04cc\u0001\u0000\u0000\u0000\u011c\u04d1\u0001\u0000\u0000\u0000\u011e"+ + "\u04d8\u0001\u0000\u0000\u0000\u0120\u04e1\u0001\u0000\u0000\u0000\u0122"+ + "\u04e8\u0001\u0000\u0000\u0000\u0124\u04ec\u0001\u0000\u0000\u0000\u0126"+ + "\u04f0\u0001\u0000\u0000\u0000\u0128\u04f4\u0001\u0000\u0000\u0000\u012a"+ + "\u04f8\u0001\u0000\u0000\u0000\u012c\u04fe\u0001\u0000\u0000\u0000\u012e"+ + "\u0502\u0001\u0000\u0000\u0000\u0130\u0506\u0001\u0000\u0000\u0000\u0132"+ + "\u050a\u0001\u0000\u0000\u0000\u0134\u050e\u0001\u0000\u0000\u0000\u0136"+ + "\u0512\u0001\u0000\u0000\u0000\u0138\u0516\u0001\u0000\u0000\u0000\u013a"+ + "\u051b\u0001\u0000\u0000\u0000\u013c\u0520\u0001\u0000\u0000\u0000\u013e"+ + "\u0524\u0001\u0000\u0000\u0000\u0140\u0528\u0001\u0000\u0000\u0000\u0142"+ + 
"\u052c\u0001\u0000\u0000\u0000\u0144\u0531\u0001\u0000\u0000\u0000\u0146"+ + "\u0535\u0001\u0000\u0000\u0000\u0148\u053a\u0001\u0000\u0000\u0000\u014a"+ + "\u053f\u0001\u0000\u0000\u0000\u014c\u0543\u0001\u0000\u0000\u0000\u014e"+ + "\u0547\u0001\u0000\u0000\u0000\u0150\u054b\u0001\u0000\u0000\u0000\u0152"+ + "\u054f\u0001\u0000\u0000\u0000\u0154\u0553\u0001\u0000\u0000\u0000\u0156"+ + "\u0558\u0001\u0000\u0000\u0000\u0158\u055d\u0001\u0000\u0000\u0000\u015a"+ + "\u0561\u0001\u0000\u0000\u0000\u015c\u0565\u0001\u0000\u0000\u0000\u015e"+ + "\u0569\u0001\u0000\u0000\u0000\u0160\u056e\u0001\u0000\u0000\u0000\u0162"+ + "\u0577\u0001\u0000\u0000\u0000\u0164\u057b\u0001\u0000\u0000\u0000\u0166"+ + "\u057f\u0001\u0000\u0000\u0000\u0168\u0583\u0001\u0000\u0000\u0000\u016a"+ + "\u0587\u0001\u0000\u0000\u0000\u016c\u058c\u0001\u0000\u0000\u0000\u016e"+ + "\u0590\u0001\u0000\u0000\u0000\u0170\u0594\u0001\u0000\u0000\u0000\u0172"+ + "\u0598\u0001\u0000\u0000\u0000\u0174\u059d\u0001\u0000\u0000\u0000\u0176"+ + "\u05a1\u0001\u0000\u0000\u0000\u0178\u05a5\u0001\u0000\u0000\u0000\u017a"+ + "\u05a9\u0001\u0000\u0000\u0000\u017c\u05ad\u0001\u0000\u0000\u0000\u017e"+ + "\u05b1\u0001\u0000\u0000\u0000\u0180\u05b7\u0001\u0000\u0000\u0000\u0182"+ + "\u05bb\u0001\u0000\u0000\u0000\u0184\u05bf\u0001\u0000\u0000\u0000\u0186"+ + "\u05c3\u0001\u0000\u0000\u0000\u0188\u05c7\u0001\u0000\u0000\u0000\u018a"+ + "\u05cb\u0001\u0000\u0000\u0000\u018c\u05cf\u0001\u0000\u0000\u0000\u018e"+ + "\u05d4\u0001\u0000\u0000\u0000\u0190\u05d8\u0001\u0000\u0000\u0000\u0192"+ + "\u05dc\u0001\u0000\u0000\u0000\u0194\u05e2\u0001\u0000\u0000\u0000\u0196"+ + "\u05eb\u0001\u0000\u0000\u0000\u0198\u05ef\u0001\u0000\u0000\u0000\u019a"+ + "\u05f3\u0001\u0000\u0000\u0000\u019c\u05f7\u0001\u0000\u0000\u0000\u019e"+ + "\u05fb\u0001\u0000\u0000\u0000\u01a0\u05ff\u0001\u0000\u0000\u0000\u01a2"+ + "\u0604\u0001\u0000\u0000\u0000\u01a4\u060a\u0001\u0000\u0000\u0000\u01a6"+ + 
"\u0610\u0001\u0000\u0000\u0000\u01a8\u0614\u0001\u0000\u0000\u0000\u01aa"+ + "\u0618\u0001\u0000\u0000\u0000\u01ac\u061c\u0001\u0000\u0000\u0000\u01ae"+ + "\u0622\u0001\u0000\u0000\u0000\u01b0\u0628\u0001\u0000\u0000\u0000\u01b2"+ + "\u062c\u0001\u0000\u0000\u0000\u01b4\u0630\u0001\u0000\u0000\u0000\u01b6"+ + "\u0634\u0001\u0000\u0000\u0000\u01b8\u063a\u0001\u0000\u0000\u0000\u01ba"+ + "\u0640\u0001\u0000\u0000\u0000\u01bc\u0646\u0001\u0000\u0000\u0000\u01be"+ + "\u01bf\u0007\u0000\u0000\u0000\u01bf\u01c0\u0007\u0001\u0000\u0000\u01c0"+ + "\u01c1\u0007\u0002\u0000\u0000\u01c1\u01c2\u0007\u0002\u0000\u0000\u01c2"+ + "\u01c3\u0007\u0003\u0000\u0000\u01c3\u01c4\u0007\u0004\u0000\u0000\u01c4"+ + "\u01c5\u0007\u0005\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000\u0000\u01c6"+ + "\u01c7\u0006\u0000\u0000\u0000\u01c7\u0011\u0001\u0000\u0000\u0000\u01c8"+ + "\u01c9\u0007\u0000\u0000\u0000\u01c9\u01ca\u0007\u0006\u0000\u0000\u01ca"+ + "\u01cb\u0007\u0007\u0000\u0000\u01cb\u01cc\u0007\b\u0000\u0000\u01cc\u01cd"+ + "\u0001\u0000\u0000\u0000\u01cd\u01ce\u0006\u0001\u0001\u0000\u01ce\u0013"+ + "\u0001\u0000\u0000\u0000\u01cf\u01d0\u0007\u0003\u0000\u0000\u01d0\u01d1"+ + "\u0007\t\u0000\u0000\u01d1\u01d2\u0007\u0006\u0000\u0000\u01d2\u01d3\u0007"+ + "\u0001\u0000\u0000\u01d3\u01d4\u0007\u0004\u0000\u0000\u01d4\u01d5\u0007"+ + "\n\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0006\u0002"+ + "\u0002\u0000\u01d7\u0015\u0001\u0000\u0000\u0000\u01d8\u01d9\u0007\u0003"+ + "\u0000\u0000\u01d9\u01da\u0007\u000b\u0000\u0000\u01da\u01db\u0007\f\u0000"+ + "\u0000\u01db\u01dc\u0007\r\u0000\u0000\u01dc\u01dd\u0001\u0000\u0000\u0000"+ + "\u01dd\u01de\u0006\u0003\u0000\u0000\u01de\u0017\u0001\u0000\u0000\u0000"+ + "\u01df\u01e0\u0007\u0003\u0000\u0000\u01e0\u01e1\u0007\u000e\u0000\u0000"+ + "\u01e1\u01e2\u0007\b\u0000\u0000\u01e2\u01e3\u0007\r\u0000\u0000\u01e3"+ + "\u01e4\u0007\f\u0000\u0000\u01e4\u01e5\u0007\u0001\u0000\u0000\u01e5\u01e6"+ + 
"\u0007\t\u0000\u0000\u01e6\u01e7\u0001\u0000\u0000\u0000\u01e7\u01e8\u0006"+ + "\u0004\u0003\u0000\u01e8\u0019\u0001\u0000\u0000\u0000\u01e9\u01ea\u0007"+ + "\u000f\u0000\u0000\u01ea\u01eb\u0007\u0006\u0000\u0000\u01eb\u01ec\u0007"+ + "\u0007\u0000\u0000\u01ec\u01ed\u0007\u0010\u0000\u0000\u01ed\u01ee\u0001"+ + "\u0000\u0000\u0000\u01ee\u01ef\u0006\u0005\u0004\u0000\u01ef\u001b\u0001"+ + "\u0000\u0000\u0000\u01f0\u01f1\u0007\u0011\u0000\u0000\u01f1\u01f2\u0007"+ + "\u0006\u0000\u0000\u01f2\u01f3\u0007\u0007\u0000\u0000\u01f3\u01f4\u0007"+ + "\u0012\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f6\u0006"+ + "\u0006\u0000\u0000\u01f6\u001d\u0001\u0000\u0000\u0000\u01f7\u01f8\u0007"+ + "\u0012\u0000\u0000\u01f8\u01f9\u0007\u0003\u0000\u0000\u01f9\u01fa\u0007"+ + "\u0003\u0000\u0000\u01fa\u01fb\u0007\b\u0000\u0000\u01fb\u01fc\u0001\u0000"+ + "\u0000\u0000\u01fc\u01fd\u0006\u0007\u0001\u0000\u01fd\u001f\u0001\u0000"+ + "\u0000\u0000\u01fe\u01ff\u0007\r\u0000\u0000\u01ff\u0200\u0007\u0001\u0000"+ + "\u0000\u0200\u0201\u0007\u0010\u0000\u0000\u0201\u0202\u0007\u0001\u0000"+ + "\u0000\u0202\u0203\u0007\u0005\u0000\u0000\u0203\u0204\u0001\u0000\u0000"+ + "\u0000\u0204\u0205\u0006\b\u0000\u0000\u0205!\u0001\u0000\u0000\u0000"+ + "\u0206\u0207\u0007\u0010\u0000\u0000\u0207\u0208\u0007\u000b\u0000\u0000"+ + "\u0208\u0209\u0005_\u0000\u0000\u0209\u020a\u0007\u0003\u0000\u0000\u020a"+ + "\u020b\u0007\u000e\u0000\u0000\u020b\u020c\u0007\b\u0000\u0000\u020c\u020d"+ + "\u0007\f\u0000\u0000\u020d\u020e\u0007\t\u0000\u0000\u020e\u020f\u0007"+ + "\u0000\u0000\u0000\u020f\u0210\u0001\u0000\u0000\u0000\u0210\u0211\u0006"+ + "\t\u0005\u0000\u0211#\u0001\u0000\u0000\u0000\u0212\u0213\u0007\u0006"+ + "\u0000\u0000\u0213\u0214\u0007\u0003\u0000\u0000\u0214\u0215\u0007\t\u0000"+ + "\u0000\u0215\u0216\u0007\f\u0000\u0000\u0216\u0217\u0007\u0010\u0000\u0000"+ + "\u0217\u0218\u0007\u0003\u0000\u0000\u0218\u0219\u0001\u0000\u0000\u0000"+ + 
"\u0219\u021a\u0006\n\u0006\u0000\u021a%\u0001\u0000\u0000\u0000\u021b"+ + "\u021c\u0007\u0006\u0000\u0000\u021c\u021d\u0007\u0007\u0000\u0000\u021d"+ + "\u021e\u0007\u0013\u0000\u0000\u021e\u021f\u0001\u0000\u0000\u0000\u021f"+ + "\u0220\u0006\u000b\u0000\u0000\u0220\'\u0001\u0000\u0000\u0000\u0221\u0222"+ + "\u0007\u0002\u0000\u0000\u0222\u0223\u0007\n\u0000\u0000\u0223\u0224\u0007"+ + "\u0007\u0000\u0000\u0224\u0225\u0007\u0013\u0000\u0000\u0225\u0226\u0001"+ + "\u0000\u0000\u0000\u0226\u0227\u0006\f\u0007\u0000\u0227)\u0001\u0000"+ + "\u0000\u0000\u0228\u0229\u0007\u0002\u0000\u0000\u0229\u022a\u0007\u0007"+ + "\u0000\u0000\u022a\u022b\u0007\u0006\u0000\u0000\u022b\u022c\u0007\u0005"+ + "\u0000\u0000\u022c\u022d\u0001\u0000\u0000\u0000\u022d\u022e\u0006\r\u0000"+ + "\u0000\u022e+\u0001\u0000\u0000\u0000\u022f\u0230\u0007\u0002\u0000\u0000"+ + "\u0230\u0231\u0007\u0005\u0000\u0000\u0231\u0232\u0007\f\u0000\u0000\u0232"+ + "\u0233\u0007\u0005\u0000\u0000\u0233\u0234\u0007\u0002\u0000\u0000\u0234"+ + "\u0235\u0001\u0000\u0000\u0000\u0235\u0236\u0006\u000e\u0000\u0000\u0236"+ + "-\u0001\u0000\u0000\u0000\u0237\u0238\u0007\u0013\u0000\u0000\u0238\u0239"+ + "\u0007\n\u0000\u0000\u0239\u023a\u0007\u0003\u0000\u0000\u023a\u023b\u0007"+ + "\u0006\u0000\u0000\u023b\u023c\u0007\u0003\u0000\u0000\u023c\u023d\u0001"+ + "\u0000\u0000\u0000\u023d\u023e\u0006\u000f\u0000\u0000\u023e/\u0001\u0000"+ + "\u0000\u0000\u023f\u0240\u0004\u0010\u0000\u0000\u0240\u0241\u0007\u0001"+ + "\u0000\u0000\u0241\u0242\u0007\t\u0000\u0000\u0242\u0243\u0007\r\u0000"+ + "\u0000\u0243\u0244\u0007\u0001\u0000\u0000\u0244\u0245\u0007\t\u0000\u0000"+ + "\u0245\u0246\u0007\u0003\u0000\u0000\u0246\u0247\u0007\u0002\u0000\u0000"+ + "\u0247\u0248\u0007\u0005\u0000\u0000\u0248\u0249\u0007\f\u0000\u0000\u0249"+ + "\u024a\u0007\u0005\u0000\u0000\u024a\u024b\u0007\u0002\u0000\u0000\u024b"+ + "\u024c\u0001\u0000\u0000\u0000\u024c\u024d\u0006\u0010\u0000\u0000\u024d"+ + 
"1\u0001\u0000\u0000\u0000\u024e\u024f\u0004\u0011\u0001\u0000\u024f\u0250"+ + "\u0007\r\u0000\u0000\u0250\u0251\u0007\u0007\u0000\u0000\u0251\u0252\u0007"+ + "\u0007\u0000\u0000\u0252\u0253\u0007\u0012\u0000\u0000\u0253\u0254\u0007"+ + "\u0014\u0000\u0000\u0254\u0255\u0007\b\u0000\u0000\u0255\u0256\u0005_"+ + "\u0000\u0000\u0256\u0257\u0005\u8001\uf414\u0000\u0000\u0257\u0258\u0001"+ + "\u0000\u0000\u0000\u0258\u0259\u0006\u0011\b\u0000\u02593\u0001\u0000"+ + "\u0000\u0000\u025a\u025b\u0004\u0012\u0002\u0000\u025b\u025c\u0007\u0010"+ + "\u0000\u0000\u025c\u025d\u0007\u0003\u0000\u0000\u025d\u025e\u0007\u0005"+ + "\u0000\u0000\u025e\u025f\u0007\u0006\u0000\u0000\u025f\u0260\u0007\u0001"+ + "\u0000\u0000\u0260\u0261\u0007\u0004\u0000\u0000\u0261\u0262\u0007\u0002"+ + "\u0000\u0000\u0262\u0263\u0001\u0000\u0000\u0000\u0263\u0264\u0006\u0012"+ + "\t\u0000\u02645\u0001\u0000\u0000\u0000\u0265\u0266\u0004\u0013\u0003"+ + "\u0000\u0266\u0267\u0007\u0015\u0000\u0000\u0267\u0268\u0007\u0007\u0000"+ + "\u0000\u0268\u0269\u0007\u0001\u0000\u0000\u0269\u026a\u0007\t\u0000\u0000"+ + "\u026a\u026b\u0001\u0000\u0000\u0000\u026b\u026c\u0006\u0013\n\u0000\u026c"+ + "7\u0001\u0000\u0000\u0000\u026d\u026e\u0004\u0014\u0004\u0000\u026e\u026f"+ + "\u0007\u000f\u0000\u0000\u026f\u0270\u0007\u0014\u0000\u0000\u0270\u0271"+ + "\u0007\r\u0000\u0000\u0271\u0272\u0007\r\u0000\u0000\u0272\u0273\u0001"+ + "\u0000\u0000\u0000\u0273\u0274\u0006\u0014\n\u0000\u02749\u0001\u0000"+ + "\u0000\u0000\u0275\u0276\u0004\u0015\u0005\u0000\u0276\u0277\u0007\r\u0000"+ + "\u0000\u0277\u0278\u0007\u0003\u0000\u0000\u0278\u0279\u0007\u000f\u0000"+ + "\u0000\u0279\u027a\u0007\u0005\u0000\u0000\u027a\u027b\u0001\u0000\u0000"+ + "\u0000\u027b\u027c\u0006\u0015\n\u0000\u027c;\u0001\u0000\u0000\u0000"+ + "\u027d\u027e\u0004\u0016\u0006\u0000\u027e\u027f\u0007\u0006\u0000\u0000"+ + "\u027f\u0280\u0007\u0001\u0000\u0000\u0280\u0281\u0007\u0011\u0000\u0000"+ + 
"\u0281\u0282\u0007\n\u0000\u0000\u0282\u0283\u0007\u0005\u0000\u0000\u0283"+ + "\u0284\u0001\u0000\u0000\u0000\u0284\u0285\u0006\u0016\n\u0000\u0285="+ + "\u0001\u0000\u0000\u0000\u0286\u0287\u0004\u0017\u0007\u0000\u0287\u0288"+ + "\u0007\r\u0000\u0000\u0288\u0289\u0007\u0007\u0000\u0000\u0289\u028a\u0007"+ + "\u0007\u0000\u0000\u028a\u028b\u0007\u0012\u0000\u0000\u028b\u028c\u0007"+ + "\u0014\u0000\u0000\u028c\u028d\u0007\b\u0000\u0000\u028d\u028e\u0001\u0000"+ + "\u0000\u0000\u028e\u028f\u0006\u0017\n\u0000\u028f?\u0001\u0000\u0000"+ + "\u0000\u0290\u0292\b\u0016\u0000\u0000\u0291\u0290\u0001\u0000\u0000\u0000"+ + "\u0292\u0293\u0001\u0000\u0000\u0000\u0293\u0291\u0001\u0000\u0000\u0000"+ + "\u0293\u0294\u0001\u0000\u0000\u0000\u0294\u0295\u0001\u0000\u0000\u0000"+ + "\u0295\u0296\u0006\u0018\u0000\u0000\u0296A\u0001\u0000\u0000\u0000\u0297"+ + "\u0298\u0005/\u0000\u0000\u0298\u0299\u0005/\u0000\u0000\u0299\u029d\u0001"+ + "\u0000\u0000\u0000\u029a\u029c\b\u0017\u0000\u0000\u029b\u029a\u0001\u0000"+ + "\u0000\u0000\u029c\u029f\u0001\u0000\u0000\u0000\u029d\u029b\u0001\u0000"+ + "\u0000\u0000\u029d\u029e\u0001\u0000\u0000\u0000\u029e\u02a1\u0001\u0000"+ + "\u0000\u0000\u029f\u029d\u0001\u0000\u0000\u0000\u02a0\u02a2\u0005\r\u0000"+ + "\u0000\u02a1\u02a0\u0001\u0000\u0000\u0000\u02a1\u02a2\u0001\u0000\u0000"+ + "\u0000\u02a2\u02a4\u0001\u0000\u0000\u0000\u02a3\u02a5\u0005\n\u0000\u0000"+ + "\u02a4\u02a3\u0001\u0000\u0000\u0000\u02a4\u02a5\u0001\u0000\u0000\u0000"+ + "\u02a5\u02a6\u0001\u0000\u0000\u0000\u02a6\u02a7\u0006\u0019\u000b\u0000"+ + "\u02a7C\u0001\u0000\u0000\u0000\u02a8\u02a9\u0005/\u0000\u0000\u02a9\u02aa"+ + "\u0005*\u0000\u0000\u02aa\u02af\u0001\u0000\u0000\u0000\u02ab\u02ae\u0003"+ + "D\u001a\u0000\u02ac\u02ae\t\u0000\u0000\u0000\u02ad\u02ab\u0001\u0000"+ + "\u0000\u0000\u02ad\u02ac\u0001\u0000\u0000\u0000\u02ae\u02b1\u0001\u0000"+ + "\u0000\u0000\u02af\u02b0\u0001\u0000\u0000\u0000\u02af\u02ad\u0001\u0000"+ + 
"\u0000\u0000\u02b0\u02b2\u0001\u0000\u0000\u0000\u02b1\u02af\u0001\u0000"+ + "\u0000\u0000\u02b2\u02b3\u0005*\u0000\u0000\u02b3\u02b4\u0005/\u0000\u0000"+ + "\u02b4\u02b5\u0001\u0000\u0000\u0000\u02b5\u02b6\u0006\u001a\u000b\u0000"+ + "\u02b6E\u0001\u0000\u0000\u0000\u02b7\u02b9\u0007\u0018\u0000\u0000\u02b8"+ + "\u02b7\u0001\u0000\u0000\u0000\u02b9\u02ba\u0001\u0000\u0000\u0000\u02ba"+ + "\u02b8\u0001\u0000\u0000\u0000\u02ba\u02bb\u0001\u0000\u0000\u0000\u02bb"+ + "\u02bc\u0001\u0000\u0000\u0000\u02bc\u02bd\u0006\u001b\u000b\u0000\u02bd"+ + "G\u0001\u0000\u0000\u0000\u02be\u02bf\u0005|\u0000\u0000\u02bf\u02c0\u0001"+ + "\u0000\u0000\u0000\u02c0\u02c1\u0006\u001c\f\u0000\u02c1I\u0001\u0000"+ + "\u0000\u0000\u02c2\u02c3\u0007\u0019\u0000\u0000\u02c3K\u0001\u0000\u0000"+ + "\u0000\u02c4\u02c5\u0007\u001a\u0000\u0000\u02c5M\u0001\u0000\u0000\u0000"+ + "\u02c6\u02c7\u0005\\\u0000\u0000\u02c7\u02c8\u0007\u001b\u0000\u0000\u02c8"+ + "O\u0001\u0000\u0000\u0000\u02c9\u02ca\b\u001c\u0000\u0000\u02caQ\u0001"+ + "\u0000\u0000\u0000\u02cb\u02cd\u0007\u0003\u0000\u0000\u02cc\u02ce\u0007"+ + "\u001d\u0000\u0000\u02cd\u02cc\u0001\u0000\u0000\u0000\u02cd\u02ce\u0001"+ + "\u0000\u0000\u0000\u02ce\u02d0\u0001\u0000\u0000\u0000\u02cf\u02d1\u0003"+ + "J\u001d\u0000\u02d0\u02cf\u0001\u0000\u0000\u0000\u02d1\u02d2\u0001\u0000"+ + "\u0000\u0000\u02d2\u02d0\u0001\u0000\u0000\u0000\u02d2\u02d3\u0001\u0000"+ + "\u0000\u0000\u02d3S\u0001\u0000\u0000\u0000\u02d4\u02d5\u0005@\u0000\u0000"+ + "\u02d5U\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005`\u0000\u0000\u02d7W"+ + "\u0001\u0000\u0000\u0000\u02d8\u02dc\b\u001e\u0000\u0000\u02d9\u02da\u0005"+ + "`\u0000\u0000\u02da\u02dc\u0005`\u0000\u0000\u02db\u02d8\u0001\u0000\u0000"+ + "\u0000\u02db\u02d9\u0001\u0000\u0000\u0000\u02dcY\u0001\u0000\u0000\u0000"+ + "\u02dd\u02de\u0005_\u0000\u0000\u02de[\u0001\u0000\u0000\u0000\u02df\u02e3"+ + "\u0003L\u001e\u0000\u02e0\u02e3\u0003J\u001d\u0000\u02e1\u02e3\u0003Z"+ + 
"%\u0000\u02e2\u02df\u0001\u0000\u0000\u0000\u02e2\u02e0\u0001\u0000\u0000"+ + "\u0000\u02e2\u02e1\u0001\u0000\u0000\u0000\u02e3]\u0001\u0000\u0000\u0000"+ + "\u02e4\u02e9\u0005\"\u0000\u0000\u02e5\u02e8\u0003N\u001f\u0000\u02e6"+ + "\u02e8\u0003P \u0000\u02e7\u02e5\u0001\u0000\u0000\u0000\u02e7\u02e6\u0001"+ + "\u0000\u0000\u0000\u02e8\u02eb\u0001\u0000\u0000\u0000\u02e9\u02e7\u0001"+ + "\u0000\u0000\u0000\u02e9\u02ea\u0001\u0000\u0000\u0000\u02ea\u02ec\u0001"+ + "\u0000\u0000\u0000\u02eb\u02e9\u0001\u0000\u0000\u0000\u02ec\u0302\u0005"+ + "\"\u0000\u0000\u02ed\u02ee\u0005\"\u0000\u0000\u02ee\u02ef\u0005\"\u0000"+ + "\u0000\u02ef\u02f0\u0005\"\u0000\u0000\u02f0\u02f4\u0001\u0000\u0000\u0000"+ + "\u02f1\u02f3\b\u0017\u0000\u0000\u02f2\u02f1\u0001\u0000\u0000\u0000\u02f3"+ + "\u02f6\u0001\u0000\u0000\u0000\u02f4\u02f5\u0001\u0000\u0000\u0000\u02f4"+ + "\u02f2\u0001\u0000\u0000\u0000\u02f5\u02f7\u0001\u0000\u0000\u0000\u02f6"+ + "\u02f4\u0001\u0000\u0000\u0000\u02f7\u02f8\u0005\"\u0000\u0000\u02f8\u02f9"+ + "\u0005\"\u0000\u0000\u02f9\u02fa\u0005\"\u0000\u0000\u02fa\u02fc\u0001"+ + "\u0000\u0000\u0000\u02fb\u02fd\u0005\"\u0000\u0000\u02fc\u02fb\u0001\u0000"+ + "\u0000\u0000\u02fc\u02fd\u0001\u0000\u0000\u0000\u02fd\u02ff\u0001\u0000"+ + "\u0000\u0000\u02fe\u0300\u0005\"\u0000\u0000\u02ff\u02fe\u0001\u0000\u0000"+ + "\u0000\u02ff\u0300\u0001\u0000\u0000\u0000\u0300\u0302\u0001\u0000\u0000"+ + "\u0000\u0301\u02e4\u0001\u0000\u0000\u0000\u0301\u02ed\u0001\u0000\u0000"+ + "\u0000\u0302_\u0001\u0000\u0000\u0000\u0303\u0305\u0003J\u001d\u0000\u0304"+ + "\u0303\u0001\u0000\u0000\u0000\u0305\u0306\u0001\u0000\u0000\u0000\u0306"+ + "\u0304\u0001\u0000\u0000\u0000\u0306\u0307\u0001\u0000\u0000\u0000\u0307"+ + "a\u0001\u0000\u0000\u0000\u0308\u030a\u0003J\u001d\u0000\u0309\u0308\u0001"+ + "\u0000\u0000\u0000\u030a\u030b\u0001\u0000\u0000\u0000\u030b\u0309\u0001"+ + "\u0000\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030d\u0001"+ + 
"\u0000\u0000\u0000\u030d\u0311\u0003t2\u0000\u030e\u0310\u0003J\u001d"+ + "\u0000\u030f\u030e\u0001\u0000\u0000\u0000\u0310\u0313\u0001\u0000\u0000"+ + "\u0000\u0311\u030f\u0001\u0000\u0000\u0000\u0311\u0312\u0001\u0000\u0000"+ + "\u0000\u0312\u0333\u0001\u0000\u0000\u0000\u0313\u0311\u0001\u0000\u0000"+ + "\u0000\u0314\u0316\u0003t2\u0000\u0315\u0317\u0003J\u001d\u0000\u0316"+ + "\u0315\u0001\u0000\u0000\u0000\u0317\u0318\u0001\u0000\u0000\u0000\u0318"+ + "\u0316\u0001\u0000\u0000\u0000\u0318\u0319\u0001\u0000\u0000\u0000\u0319"+ + "\u0333\u0001\u0000\u0000\u0000\u031a\u031c\u0003J\u001d\u0000\u031b\u031a"+ + "\u0001\u0000\u0000\u0000\u031c\u031d\u0001\u0000\u0000\u0000\u031d\u031b"+ + "\u0001\u0000\u0000\u0000\u031d\u031e\u0001\u0000\u0000\u0000\u031e\u0326"+ + "\u0001\u0000\u0000\u0000\u031f\u0323\u0003t2\u0000\u0320\u0322\u0003J"+ + "\u001d\u0000\u0321\u0320\u0001\u0000\u0000\u0000\u0322\u0325\u0001\u0000"+ + "\u0000\u0000\u0323\u0321\u0001\u0000\u0000\u0000\u0323\u0324\u0001\u0000"+ + "\u0000\u0000\u0324\u0327\u0001\u0000\u0000\u0000\u0325\u0323\u0001\u0000"+ + "\u0000\u0000\u0326\u031f\u0001\u0000\u0000\u0000\u0326\u0327\u0001\u0000"+ + "\u0000\u0000\u0327\u0328\u0001\u0000\u0000\u0000\u0328\u0329\u0003R!\u0000"+ + "\u0329\u0333\u0001\u0000\u0000\u0000\u032a\u032c\u0003t2\u0000\u032b\u032d"+ + "\u0003J\u001d\u0000\u032c\u032b\u0001\u0000\u0000\u0000\u032d\u032e\u0001"+ + "\u0000\u0000\u0000\u032e\u032c\u0001\u0000\u0000\u0000\u032e\u032f\u0001"+ + "\u0000\u0000\u0000\u032f\u0330\u0001\u0000\u0000\u0000\u0330\u0331\u0003"+ + "R!\u0000\u0331\u0333\u0001\u0000\u0000\u0000\u0332\u0309\u0001\u0000\u0000"+ + "\u0000\u0332\u0314\u0001\u0000\u0000\u0000\u0332\u031b\u0001\u0000\u0000"+ + "\u0000\u0332\u032a\u0001\u0000\u0000\u0000\u0333c\u0001\u0000\u0000\u0000"+ + "\u0334\u0335\u0007\u001f\u0000\u0000\u0335\u0336\u0007 \u0000\u0000\u0336"+ + "e\u0001\u0000\u0000\u0000\u0337\u0338\u0007\f\u0000\u0000\u0338\u0339"+ + 
"\u0007\t\u0000\u0000\u0339\u033a\u0007\u0000\u0000\u0000\u033ag\u0001"+ + "\u0000\u0000\u0000\u033b\u033c\u0007\f\u0000\u0000\u033c\u033d\u0007\u0002"+ + "\u0000\u0000\u033d\u033e\u0007\u0004\u0000\u0000\u033ei\u0001\u0000\u0000"+ + "\u0000\u033f\u0340\u0005=\u0000\u0000\u0340k\u0001\u0000\u0000\u0000\u0341"+ + "\u0342\u0005:\u0000\u0000\u0342\u0343\u0005:\u0000\u0000\u0343m\u0001"+ + "\u0000\u0000\u0000\u0344\u0345\u0005:\u0000\u0000\u0345o\u0001\u0000\u0000"+ + "\u0000\u0346\u0347\u0005,\u0000\u0000\u0347q\u0001\u0000\u0000\u0000\u0348"+ + "\u0349\u0007\u0000\u0000\u0000\u0349\u034a\u0007\u0003\u0000\u0000\u034a"+ + "\u034b\u0007\u0002\u0000\u0000\u034b\u034c\u0007\u0004\u0000\u0000\u034c"+ + "s\u0001\u0000\u0000\u0000\u034d\u034e\u0005.\u0000\u0000\u034eu\u0001"+ + "\u0000\u0000\u0000\u034f\u0350\u0007\u000f\u0000\u0000\u0350\u0351\u0007"+ + "\f\u0000\u0000\u0351\u0352\u0007\r\u0000\u0000\u0352\u0353\u0007\u0002"+ + "\u0000\u0000\u0353\u0354\u0007\u0003\u0000\u0000\u0354w\u0001\u0000\u0000"+ + "\u0000\u0355\u0356\u0007\u000f\u0000\u0000\u0356\u0357\u0007\u0001\u0000"+ + "\u0000\u0357\u0358\u0007\u0006\u0000\u0000\u0358\u0359\u0007\u0002\u0000"+ + "\u0000\u0359\u035a\u0007\u0005\u0000\u0000\u035ay\u0001\u0000\u0000\u0000"+ + "\u035b\u035c\u0007\u0001\u0000\u0000\u035c\u035d\u0007\t\u0000\u0000\u035d"+ + "{\u0001\u0000\u0000\u0000\u035e\u035f\u0007\u0001\u0000\u0000\u035f\u0360"+ + "\u0007\u0002\u0000\u0000\u0360}\u0001\u0000\u0000\u0000\u0361\u0362\u0007"+ + "\r\u0000\u0000\u0362\u0363\u0007\f\u0000\u0000\u0363\u0364\u0007\u0002"+ + "\u0000\u0000\u0364\u0365\u0007\u0005\u0000\u0000\u0365\u007f\u0001\u0000"+ + "\u0000\u0000\u0366\u0367\u0007\r\u0000\u0000\u0367\u0368\u0007\u0001\u0000"+ + "\u0000\u0368\u0369\u0007\u0012\u0000\u0000\u0369\u036a\u0007\u0003\u0000"+ + "\u0000\u036a\u0081\u0001\u0000\u0000\u0000\u036b\u036c\u0005(\u0000\u0000"+ + "\u036c\u0083\u0001\u0000\u0000\u0000\u036d\u036e\u0007\t\u0000\u0000\u036e"+ + 
"\u036f\u0007\u0007\u0000\u0000\u036f\u0370\u0007\u0005\u0000\u0000\u0370"+ + "\u0085\u0001\u0000\u0000\u0000\u0371\u0372\u0007\t\u0000\u0000\u0372\u0373"+ + "\u0007\u0014\u0000\u0000\u0373\u0374\u0007\r\u0000\u0000\u0374\u0375\u0007"+ + "\r\u0000\u0000\u0375\u0087\u0001\u0000\u0000\u0000\u0376\u0377\u0007\t"+ + "\u0000\u0000\u0377\u0378\u0007\u0014\u0000\u0000\u0378\u0379\u0007\r\u0000"+ + "\u0000\u0379\u037a\u0007\r\u0000\u0000\u037a\u037b\u0007\u0002\u0000\u0000"+ + "\u037b\u0089\u0001\u0000\u0000\u0000\u037c\u037d\u0007\u0007\u0000\u0000"+ + "\u037d\u037e\u0007\u0006\u0000\u0000\u037e\u008b\u0001\u0000\u0000\u0000"+ + "\u037f\u0380\u0005?\u0000\u0000\u0380\u008d\u0001\u0000\u0000\u0000\u0381"+ + "\u0382\u0007\u0006\u0000\u0000\u0382\u0383\u0007\r\u0000\u0000\u0383\u0384"+ + "\u0007\u0001\u0000\u0000\u0384\u0385\u0007\u0012\u0000\u0000\u0385\u0386"+ + "\u0007\u0003\u0000\u0000\u0386\u008f\u0001\u0000\u0000\u0000\u0387\u0388"+ + "\u0005)\u0000\u0000\u0388\u0091\u0001\u0000\u0000\u0000\u0389\u038a\u0007"+ + "\u0005\u0000\u0000\u038a\u038b\u0007\u0006\u0000\u0000\u038b\u038c\u0007"+ + "\u0014\u0000\u0000\u038c\u038d\u0007\u0003\u0000\u0000\u038d\u0093\u0001"+ + "\u0000\u0000\u0000\u038e\u038f\u0005=\u0000\u0000\u038f\u0390\u0005=\u0000"+ + "\u0000\u0390\u0095\u0001\u0000\u0000\u0000\u0391\u0392\u0005=\u0000\u0000"+ + "\u0392\u0393\u0005~\u0000\u0000\u0393\u0097\u0001\u0000\u0000\u0000\u0394"+ + "\u0395\u0005!\u0000\u0000\u0395\u0396\u0005=\u0000\u0000\u0396\u0099\u0001"+ + "\u0000\u0000\u0000\u0397\u0398\u0005<\u0000\u0000\u0398\u009b\u0001\u0000"+ + "\u0000\u0000\u0399\u039a\u0005<\u0000\u0000\u039a\u039b\u0005=\u0000\u0000"+ + "\u039b\u009d\u0001\u0000\u0000\u0000\u039c\u039d\u0005>\u0000\u0000\u039d"+ + "\u009f\u0001\u0000\u0000\u0000\u039e\u039f\u0005>\u0000\u0000\u039f\u03a0"+ + "\u0005=\u0000\u0000\u03a0\u00a1\u0001\u0000\u0000\u0000\u03a1\u03a2\u0005"+ + "+\u0000\u0000\u03a2\u00a3\u0001\u0000\u0000\u0000\u03a3\u03a4\u0005-\u0000"+ + 
"\u0000\u03a4\u00a5\u0001\u0000\u0000\u0000\u03a5\u03a6\u0005*\u0000\u0000"+ + "\u03a6\u00a7\u0001\u0000\u0000\u0000\u03a7\u03a8\u0005/\u0000\u0000\u03a8"+ + "\u00a9\u0001\u0000\u0000\u0000\u03a9\u03aa\u0005%\u0000\u0000\u03aa\u00ab"+ + "\u0001\u0000\u0000\u0000\u03ab\u03ac\u0004N\b\u0000\u03ac\u03ad\u0005"+ + "{\u0000\u0000\u03ad\u00ad\u0001\u0000\u0000\u0000\u03ae\u03af\u0004O\t"+ + "\u0000\u03af\u03b0\u0005}\u0000\u0000\u03b0\u00af\u0001\u0000\u0000\u0000"+ + "\u03b1\u03b2\u0003.\u000f\u0000\u03b2\u03b3\u0001\u0000\u0000\u0000\u03b3"+ + "\u03b4\u0006P\r\u0000\u03b4\u00b1\u0001\u0000\u0000\u0000\u03b5\u03b8"+ + "\u0003\u008c>\u0000\u03b6\u03b9\u0003L\u001e\u0000\u03b7\u03b9\u0003Z"+ + "%\u0000\u03b8\u03b6\u0001\u0000\u0000\u0000\u03b8\u03b7\u0001\u0000\u0000"+ + "\u0000\u03b9\u03bd\u0001\u0000\u0000\u0000\u03ba\u03bc\u0003\\&\u0000"+ + "\u03bb\u03ba\u0001\u0000\u0000\u0000\u03bc\u03bf\u0001\u0000\u0000\u0000"+ + "\u03bd\u03bb\u0001\u0000\u0000\u0000\u03bd\u03be\u0001\u0000\u0000\u0000"+ + "\u03be\u03c7\u0001\u0000\u0000\u0000\u03bf\u03bd\u0001\u0000\u0000\u0000"+ + "\u03c0\u03c2\u0003\u008c>\u0000\u03c1\u03c3\u0003J\u001d\u0000\u03c2\u03c1"+ + "\u0001\u0000\u0000\u0000\u03c3\u03c4\u0001\u0000\u0000\u0000\u03c4\u03c2"+ + "\u0001\u0000\u0000\u0000\u03c4\u03c5\u0001\u0000\u0000\u0000\u03c5\u03c7"+ + "\u0001\u0000\u0000\u0000\u03c6\u03b5\u0001\u0000\u0000\u0000\u03c6\u03c0"+ + "\u0001\u0000\u0000\u0000\u03c7\u00b3\u0001\u0000\u0000\u0000\u03c8\u03c9"+ + "\u0005[\u0000\u0000\u03c9\u03ca\u0001\u0000\u0000\u0000\u03ca\u03cb\u0006"+ + "R\u0000\u0000\u03cb\u03cc\u0006R\u0000\u0000\u03cc\u00b5\u0001\u0000\u0000"+ + "\u0000\u03cd\u03ce\u0005]\u0000\u0000\u03ce\u03cf\u0001\u0000\u0000\u0000"+ + "\u03cf\u03d0\u0006S\f\u0000\u03d0\u03d1\u0006S\f\u0000\u03d1\u00b7\u0001"+ + "\u0000\u0000\u0000\u03d2\u03d6\u0003L\u001e\u0000\u03d3\u03d5\u0003\\"+ + "&\u0000\u03d4\u03d3\u0001\u0000\u0000\u0000\u03d5\u03d8\u0001\u0000\u0000"+ + 
"\u0000\u03d6\u03d4\u0001\u0000\u0000\u0000\u03d6\u03d7\u0001\u0000\u0000"+ + "\u0000\u03d7\u03e3\u0001\u0000\u0000\u0000\u03d8\u03d6\u0001\u0000\u0000"+ + "\u0000\u03d9\u03dc\u0003Z%\u0000\u03da\u03dc\u0003T\"\u0000\u03db\u03d9"+ + "\u0001\u0000\u0000\u0000\u03db\u03da\u0001\u0000\u0000\u0000\u03dc\u03de"+ + "\u0001\u0000\u0000\u0000\u03dd\u03df\u0003\\&\u0000\u03de\u03dd\u0001"+ + "\u0000\u0000\u0000\u03df\u03e0\u0001\u0000\u0000\u0000\u03e0\u03de\u0001"+ + "\u0000\u0000\u0000\u03e0\u03e1\u0001\u0000\u0000\u0000\u03e1\u03e3\u0001"+ + "\u0000\u0000\u0000\u03e2\u03d2\u0001\u0000\u0000\u0000\u03e2\u03db\u0001"+ + "\u0000\u0000\u0000\u03e3\u00b9\u0001\u0000\u0000\u0000\u03e4\u03e6\u0003"+ + "V#\u0000\u03e5\u03e7\u0003X$\u0000\u03e6\u03e5\u0001\u0000\u0000\u0000"+ + "\u03e7\u03e8\u0001\u0000\u0000\u0000\u03e8\u03e6\u0001\u0000\u0000\u0000"+ + "\u03e8\u03e9\u0001\u0000\u0000\u0000\u03e9\u03ea\u0001\u0000\u0000\u0000"+ + "\u03ea\u03eb\u0003V#\u0000\u03eb\u00bb\u0001\u0000\u0000\u0000\u03ec\u03ed"+ + "\u0003\u00baU\u0000\u03ed\u00bd\u0001\u0000\u0000\u0000\u03ee\u03ef\u0003"+ + "B\u0019\u0000\u03ef\u03f0\u0001\u0000\u0000\u0000\u03f0\u03f1\u0006W\u000b"+ + "\u0000\u03f1\u00bf\u0001\u0000\u0000\u0000\u03f2\u03f3\u0003D\u001a\u0000"+ + "\u03f3\u03f4\u0001\u0000\u0000\u0000\u03f4\u03f5\u0006X\u000b\u0000\u03f5"+ + "\u00c1\u0001\u0000\u0000\u0000\u03f6\u03f7\u0003F\u001b\u0000\u03f7\u03f8"+ + "\u0001\u0000\u0000\u0000\u03f8\u03f9\u0006Y\u000b\u0000\u03f9\u00c3\u0001"+ + "\u0000\u0000\u0000\u03fa\u03fb\u0003\u00b4R\u0000\u03fb\u03fc\u0001\u0000"+ + "\u0000\u0000\u03fc\u03fd\u0006Z\u000e\u0000\u03fd\u03fe\u0006Z\u000f\u0000"+ + "\u03fe\u00c5\u0001\u0000\u0000\u0000\u03ff\u0400\u0003H\u001c\u0000\u0400"+ + "\u0401\u0001\u0000\u0000\u0000\u0401\u0402\u0006[\u0010\u0000\u0402\u0403"+ + "\u0006[\f\u0000\u0403\u00c7\u0001\u0000\u0000\u0000\u0404\u0405\u0003"+ + "F\u001b\u0000\u0405\u0406\u0001\u0000\u0000\u0000\u0406\u0407\u0006\\"+ + 
"\u000b\u0000\u0407\u00c9\u0001\u0000\u0000\u0000\u0408\u0409\u0003B\u0019"+ + "\u0000\u0409\u040a\u0001\u0000\u0000\u0000\u040a\u040b\u0006]\u000b\u0000"+ + "\u040b\u00cb\u0001\u0000\u0000\u0000\u040c\u040d\u0003D\u001a\u0000\u040d"+ + "\u040e\u0001\u0000\u0000\u0000\u040e\u040f\u0006^\u000b\u0000\u040f\u00cd"+ + "\u0001\u0000\u0000\u0000\u0410\u0411\u0003H\u001c\u0000\u0411\u0412\u0001"+ + "\u0000\u0000\u0000\u0412\u0413\u0006_\u0010\u0000\u0413\u0414\u0006_\f"+ + "\u0000\u0414\u00cf\u0001\u0000\u0000\u0000\u0415\u0416\u0003\u00b4R\u0000"+ + "\u0416\u0417\u0001\u0000\u0000\u0000\u0417\u0418\u0006`\u000e\u0000\u0418"+ + "\u00d1\u0001\u0000\u0000\u0000\u0419\u041a\u0003\u00b6S\u0000\u041a\u041b"+ + "\u0001\u0000\u0000\u0000\u041b\u041c\u0006a\u0011\u0000\u041c\u00d3\u0001"+ + "\u0000\u0000\u0000\u041d\u041e\u0003n/\u0000\u041e\u041f\u0001\u0000\u0000"+ + "\u0000\u041f\u0420\u0006b\u0012\u0000\u0420\u00d5\u0001\u0000\u0000\u0000"+ + "\u0421\u0422\u0003p0\u0000\u0422\u0423\u0001\u0000\u0000\u0000\u0423\u0424"+ + "\u0006c\u0013\u0000\u0424\u00d7\u0001\u0000\u0000\u0000\u0425\u0426\u0003"+ + "j-\u0000\u0426\u0427\u0001\u0000\u0000\u0000\u0427\u0428\u0006d\u0014"+ + "\u0000\u0428\u00d9\u0001\u0000\u0000\u0000\u0429\u042a\u0007\u0010\u0000"+ + "\u0000\u042a\u042b\u0007\u0003\u0000\u0000\u042b\u042c\u0007\u0005\u0000"+ + "\u0000\u042c\u042d\u0007\f\u0000\u0000\u042d\u042e\u0007\u0000\u0000\u0000"+ + "\u042e\u042f\u0007\f\u0000\u0000\u042f\u0430\u0007\u0005\u0000\u0000\u0430"+ + "\u0431\u0007\f\u0000\u0000\u0431\u00db\u0001\u0000\u0000\u0000\u0432\u0436"+ + "\b!\u0000\u0000\u0433\u0434\u0005/\u0000\u0000\u0434\u0436\b\"\u0000\u0000"+ + "\u0435\u0432\u0001\u0000\u0000\u0000\u0435\u0433\u0001\u0000\u0000\u0000"+ + "\u0436\u00dd\u0001\u0000\u0000\u0000\u0437\u0439\u0003\u00dcf\u0000\u0438"+ + "\u0437\u0001\u0000\u0000\u0000\u0439\u043a\u0001\u0000\u0000\u0000\u043a"+ + "\u0438\u0001\u0000\u0000\u0000\u043a\u043b\u0001\u0000\u0000\u0000\u043b"+ + 
"\u00df\u0001\u0000\u0000\u0000\u043c\u043d\u0003\u00deg\u0000\u043d\u043e"+ + "\u0001\u0000\u0000\u0000\u043e\u043f\u0006h\u0015\u0000\u043f\u00e1\u0001"+ + "\u0000\u0000\u0000\u0440\u0441\u0003^\'\u0000\u0441\u0442\u0001\u0000"+ + "\u0000\u0000\u0442\u0443\u0006i\u0016\u0000\u0443\u00e3\u0001\u0000\u0000"+ + "\u0000\u0444\u0445\u0003B\u0019\u0000\u0445\u0446\u0001\u0000\u0000\u0000"+ + "\u0446\u0447\u0006j\u000b\u0000\u0447\u00e5\u0001\u0000\u0000\u0000\u0448"+ + "\u0449\u0003D\u001a\u0000\u0449\u044a\u0001\u0000\u0000\u0000\u044a\u044b"+ + "\u0006k\u000b\u0000\u044b\u00e7\u0001\u0000\u0000\u0000\u044c\u044d\u0003"+ + "F\u001b\u0000\u044d\u044e\u0001\u0000\u0000\u0000\u044e\u044f\u0006l\u000b"+ + "\u0000\u044f\u00e9\u0001\u0000\u0000\u0000\u0450\u0451\u0003H\u001c\u0000"+ + "\u0451\u0452\u0001\u0000\u0000\u0000\u0452\u0453\u0006m\u0010\u0000\u0453"+ + "\u0454\u0006m\f\u0000\u0454\u00eb\u0001\u0000\u0000\u0000\u0455\u0456"+ + "\u0003t2\u0000\u0456\u0457\u0001\u0000\u0000\u0000\u0457\u0458\u0006n"+ + "\u0017\u0000\u0458\u00ed\u0001\u0000\u0000\u0000\u0459\u045a\u0003p0\u0000"+ + "\u045a\u045b\u0001\u0000\u0000\u0000\u045b\u045c\u0006o\u0013\u0000\u045c"+ + "\u00ef\u0001\u0000\u0000\u0000\u045d\u045e\u0004p\n\u0000\u045e\u045f"+ + "\u0003\u008c>\u0000\u045f\u0460\u0001\u0000\u0000\u0000\u0460\u0461\u0006"+ + "p\u0018\u0000\u0461\u00f1\u0001\u0000\u0000\u0000\u0462\u0463\u0004q\u000b"+ + "\u0000\u0463\u0464\u0003\u00b2Q\u0000\u0464\u0465\u0001\u0000\u0000\u0000"+ + "\u0465\u0466\u0006q\u0019\u0000\u0466\u00f3\u0001\u0000\u0000\u0000\u0467"+ + "\u046c\u0003L\u001e\u0000\u0468\u046c\u0003J\u001d\u0000\u0469\u046c\u0003"+ + "Z%\u0000\u046a\u046c\u0003\u00a6K\u0000\u046b\u0467\u0001\u0000\u0000"+ + "\u0000\u046b\u0468\u0001\u0000\u0000\u0000\u046b\u0469\u0001\u0000\u0000"+ + "\u0000\u046b\u046a\u0001\u0000\u0000\u0000\u046c\u00f5\u0001\u0000\u0000"+ + "\u0000\u046d\u0470\u0003L\u001e\u0000\u046e\u0470\u0003\u00a6K\u0000\u046f"+ + 
"\u046d\u0001\u0000\u0000\u0000\u046f\u046e\u0001\u0000\u0000\u0000\u0470"+ + "\u0474\u0001\u0000\u0000\u0000\u0471\u0473\u0003\u00f4r\u0000\u0472\u0471"+ + "\u0001\u0000\u0000\u0000\u0473\u0476\u0001\u0000\u0000\u0000\u0474\u0472"+ + "\u0001\u0000\u0000\u0000\u0474\u0475\u0001\u0000\u0000\u0000\u0475\u0481"+ + "\u0001\u0000\u0000\u0000\u0476\u0474\u0001\u0000\u0000\u0000\u0477\u047a"+ + "\u0003Z%\u0000\u0478\u047a\u0003T\"\u0000\u0479\u0477\u0001\u0000\u0000"+ + "\u0000\u0479\u0478\u0001\u0000\u0000\u0000\u047a\u047c\u0001\u0000\u0000"+ + "\u0000\u047b\u047d\u0003\u00f4r\u0000\u047c\u047b\u0001\u0000\u0000\u0000"+ + "\u047d\u047e\u0001\u0000\u0000\u0000\u047e\u047c\u0001\u0000\u0000\u0000"+ + "\u047e\u047f\u0001\u0000\u0000\u0000\u047f\u0481\u0001\u0000\u0000\u0000"+ + "\u0480\u046f\u0001\u0000\u0000\u0000\u0480\u0479\u0001\u0000\u0000\u0000"+ + "\u0481\u00f7\u0001\u0000\u0000\u0000\u0482\u0485\u0003\u00f6s\u0000\u0483"+ + "\u0485\u0003\u00baU\u0000\u0484\u0482\u0001\u0000\u0000\u0000\u0484\u0483"+ + "\u0001\u0000\u0000\u0000\u0485\u0486\u0001\u0000\u0000\u0000\u0486\u0484"+ + "\u0001\u0000\u0000\u0000\u0486\u0487\u0001\u0000\u0000\u0000\u0487\u00f9"+ + "\u0001\u0000\u0000\u0000\u0488\u0489\u0003B\u0019\u0000\u0489\u048a\u0001"+ + "\u0000\u0000\u0000\u048a\u048b\u0006u\u000b\u0000\u048b\u00fb\u0001\u0000"+ + "\u0000\u0000\u048c\u048d\u0003D\u001a\u0000\u048d\u048e\u0001\u0000\u0000"+ + "\u0000\u048e\u048f\u0006v\u000b\u0000\u048f\u00fd\u0001\u0000\u0000\u0000"+ + "\u0490\u0491\u0003F\u001b\u0000\u0491\u0492\u0001\u0000\u0000\u0000\u0492"+ + "\u0493\u0006w\u000b\u0000\u0493\u00ff\u0001\u0000\u0000\u0000\u0494\u0495"+ + "\u0003H\u001c\u0000\u0495\u0496\u0001\u0000\u0000\u0000\u0496\u0497\u0006"+ + "x\u0010\u0000\u0497\u0498\u0006x\f\u0000\u0498\u0101\u0001\u0000\u0000"+ + "\u0000\u0499\u049a\u0003j-\u0000\u049a\u049b\u0001\u0000\u0000\u0000\u049b"+ + "\u049c\u0006y\u0014\u0000\u049c\u0103\u0001\u0000\u0000\u0000\u049d\u049e"+ + 
"\u0003p0\u0000\u049e\u049f\u0001\u0000\u0000\u0000\u049f\u04a0\u0006z"+ + "\u0013\u0000\u04a0\u0105\u0001\u0000\u0000\u0000\u04a1\u04a2\u0003t2\u0000"+ + "\u04a2\u04a3\u0001\u0000\u0000\u0000\u04a3\u04a4\u0006{\u0017\u0000\u04a4"+ + "\u0107\u0001\u0000\u0000\u0000\u04a5\u04a6\u0004|\f\u0000\u04a6\u04a7"+ + "\u0003\u008c>\u0000\u04a7\u04a8\u0001\u0000\u0000\u0000\u04a8\u04a9\u0006"+ + "|\u0018\u0000\u04a9\u0109\u0001\u0000\u0000\u0000\u04aa\u04ab\u0004}\r"+ + "\u0000\u04ab\u04ac\u0003\u00b2Q\u0000\u04ac\u04ad\u0001\u0000\u0000\u0000"+ + "\u04ad\u04ae\u0006}\u0019\u0000\u04ae\u010b\u0001\u0000\u0000\u0000\u04af"+ + "\u04b0\u0007\f\u0000\u0000\u04b0\u04b1\u0007\u0002\u0000\u0000\u04b1\u010d"+ + "\u0001\u0000\u0000\u0000\u04b2\u04b3\u0003\u00f8t\u0000\u04b3\u04b4\u0001"+ + "\u0000\u0000\u0000\u04b4\u04b5\u0006\u007f\u001a\u0000\u04b5\u010f\u0001"+ + "\u0000\u0000\u0000\u04b6\u04b7\u0003B\u0019\u0000\u04b7\u04b8\u0001\u0000"+ + "\u0000\u0000\u04b8\u04b9\u0006\u0080\u000b\u0000\u04b9\u0111\u0001\u0000"+ + "\u0000\u0000\u04ba\u04bb\u0003D\u001a\u0000\u04bb\u04bc\u0001\u0000\u0000"+ + "\u0000\u04bc\u04bd\u0006\u0081\u000b\u0000\u04bd\u0113\u0001\u0000\u0000"+ + "\u0000\u04be\u04bf\u0003F\u001b\u0000\u04bf\u04c0\u0001\u0000\u0000\u0000"+ + "\u04c0\u04c1\u0006\u0082\u000b\u0000\u04c1\u0115\u0001\u0000\u0000\u0000"+ + "\u04c2\u04c3\u0003H\u001c\u0000\u04c3\u04c4\u0001\u0000\u0000\u0000\u04c4"+ + "\u04c5\u0006\u0083\u0010\u0000\u04c5\u04c6\u0006\u0083\f\u0000\u04c6\u0117"+ + "\u0001\u0000\u0000\u0000\u04c7\u04c8\u0003\u00b4R\u0000\u04c8\u04c9\u0001"+ + "\u0000\u0000\u0000\u04c9\u04ca\u0006\u0084\u000e\u0000\u04ca\u04cb\u0006"+ + "\u0084\u001b\u0000\u04cb\u0119\u0001\u0000\u0000\u0000\u04cc\u04cd\u0007"+ + "\u0007\u0000\u0000\u04cd\u04ce\u0007\t\u0000\u0000\u04ce\u04cf\u0001\u0000"+ + "\u0000\u0000\u04cf\u04d0\u0006\u0085\u001c\u0000\u04d0\u011b\u0001\u0000"+ + "\u0000\u0000\u04d1\u04d2\u0007\u0013\u0000\u0000\u04d2\u04d3\u0007\u0001"+ + 
"\u0000\u0000\u04d3\u04d4\u0007\u0005\u0000\u0000\u04d4\u04d5\u0007\n\u0000"+ + "\u0000\u04d5\u04d6\u0001\u0000\u0000\u0000\u04d6\u04d7\u0006\u0086\u001c"+ + "\u0000\u04d7\u011d\u0001\u0000\u0000\u0000\u04d8\u04d9\b#\u0000\u0000"+ + "\u04d9\u011f\u0001\u0000\u0000\u0000\u04da\u04dc\u0003\u011e\u0087\u0000"+ + "\u04db\u04da\u0001\u0000\u0000\u0000\u04dc\u04dd\u0001\u0000\u0000\u0000"+ + "\u04dd\u04db\u0001\u0000\u0000\u0000\u04dd\u04de\u0001\u0000\u0000\u0000"+ + "\u04de\u04df\u0001\u0000\u0000\u0000\u04df\u04e0\u0003n/\u0000\u04e0\u04e2"+ + "\u0001\u0000\u0000\u0000\u04e1\u04db\u0001\u0000\u0000\u0000\u04e1\u04e2"+ + "\u0001\u0000\u0000\u0000\u04e2\u04e4\u0001\u0000\u0000\u0000\u04e3\u04e5"+ + "\u0003\u011e\u0087\u0000\u04e4\u04e3\u0001\u0000\u0000\u0000\u04e5\u04e6"+ + "\u0001\u0000\u0000\u0000\u04e6\u04e4\u0001\u0000\u0000\u0000\u04e6\u04e7"+ + "\u0001\u0000\u0000\u0000\u04e7\u0121\u0001\u0000\u0000\u0000\u04e8\u04e9"+ + "\u0003\u0120\u0088\u0000\u04e9\u04ea\u0001\u0000\u0000\u0000\u04ea\u04eb"+ + "\u0006\u0089\u001d\u0000\u04eb\u0123\u0001\u0000\u0000\u0000\u04ec\u04ed"+ + "\u0003B\u0019\u0000\u04ed\u04ee\u0001\u0000\u0000\u0000\u04ee\u04ef\u0006"+ + "\u008a\u000b\u0000\u04ef\u0125\u0001\u0000\u0000\u0000\u04f0\u04f1\u0003"+ + "D\u001a\u0000\u04f1\u04f2\u0001\u0000\u0000\u0000\u04f2\u04f3\u0006\u008b"+ + "\u000b\u0000\u04f3\u0127\u0001\u0000\u0000\u0000\u04f4\u04f5\u0003F\u001b"+ + "\u0000\u04f5\u04f6\u0001\u0000\u0000\u0000\u04f6\u04f7\u0006\u008c\u000b"+ + "\u0000\u04f7\u0129\u0001\u0000\u0000\u0000\u04f8\u04f9\u0003H\u001c\u0000"+ + "\u04f9\u04fa\u0001\u0000\u0000\u0000\u04fa\u04fb\u0006\u008d\u0010\u0000"+ + "\u04fb\u04fc\u0006\u008d\f\u0000\u04fc\u04fd\u0006\u008d\f\u0000\u04fd"+ + "\u012b\u0001\u0000\u0000\u0000\u04fe\u04ff\u0003j-\u0000\u04ff\u0500\u0001"+ + "\u0000\u0000\u0000\u0500\u0501\u0006\u008e\u0014\u0000\u0501\u012d\u0001"+ + "\u0000\u0000\u0000\u0502\u0503\u0003p0\u0000\u0503\u0504\u0001\u0000\u0000"+ + 
"\u0000\u0504\u0505\u0006\u008f\u0013\u0000\u0505\u012f\u0001\u0000\u0000"+ + "\u0000\u0506\u0507\u0003t2\u0000\u0507\u0508\u0001\u0000\u0000\u0000\u0508"+ + "\u0509\u0006\u0090\u0017\u0000\u0509\u0131\u0001\u0000\u0000\u0000\u050a"+ + "\u050b\u0003\u011c\u0086\u0000\u050b\u050c\u0001\u0000\u0000\u0000\u050c"+ + "\u050d\u0006\u0091\u001e\u0000\u050d\u0133\u0001\u0000\u0000\u0000\u050e"+ + "\u050f\u0003\u00f8t\u0000\u050f\u0510\u0001\u0000\u0000\u0000\u0510\u0511"+ + "\u0006\u0092\u001a\u0000\u0511\u0135\u0001\u0000\u0000\u0000\u0512\u0513"+ + "\u0003\u00bcV\u0000\u0513\u0514\u0001\u0000\u0000\u0000\u0514\u0515\u0006"+ + "\u0093\u001f\u0000\u0515\u0137\u0001\u0000\u0000\u0000\u0516\u0517\u0004"+ + "\u0094\u000e\u0000\u0517\u0518\u0003\u008c>\u0000\u0518\u0519\u0001\u0000"+ + "\u0000\u0000\u0519\u051a\u0006\u0094\u0018\u0000\u051a\u0139\u0001\u0000"+ + "\u0000\u0000\u051b\u051c\u0004\u0095\u000f\u0000\u051c\u051d\u0003\u00b2"+ + "Q\u0000\u051d\u051e\u0001\u0000\u0000\u0000\u051e\u051f\u0006\u0095\u0019"+ + "\u0000\u051f\u013b\u0001\u0000\u0000\u0000\u0520\u0521\u0003B\u0019\u0000"+ + "\u0521\u0522\u0001\u0000\u0000\u0000\u0522\u0523\u0006\u0096\u000b\u0000"+ + "\u0523\u013d\u0001\u0000\u0000\u0000\u0524\u0525\u0003D\u001a\u0000\u0525"+ + "\u0526\u0001\u0000\u0000\u0000\u0526\u0527\u0006\u0097\u000b\u0000\u0527"+ + "\u013f\u0001\u0000\u0000\u0000\u0528\u0529\u0003F\u001b\u0000\u0529\u052a"+ + "\u0001\u0000\u0000\u0000\u052a\u052b\u0006\u0098\u000b\u0000\u052b\u0141"+ + "\u0001\u0000\u0000\u0000\u052c\u052d\u0003H\u001c\u0000\u052d\u052e\u0001"+ + "\u0000\u0000\u0000\u052e\u052f\u0006\u0099\u0010\u0000\u052f\u0530\u0006"+ + "\u0099\f\u0000\u0530\u0143\u0001\u0000\u0000\u0000\u0531\u0532\u0003t"+ + "2\u0000\u0532\u0533\u0001\u0000\u0000\u0000\u0533\u0534\u0006\u009a\u0017"+ + "\u0000\u0534\u0145\u0001\u0000\u0000\u0000\u0535\u0536\u0004\u009b\u0010"+ + "\u0000\u0536\u0537\u0003\u008c>\u0000\u0537\u0538\u0001\u0000\u0000\u0000"+ + 
"\u0538\u0539\u0006\u009b\u0018\u0000\u0539\u0147\u0001\u0000\u0000\u0000"+ + "\u053a\u053b\u0004\u009c\u0011\u0000\u053b\u053c\u0003\u00b2Q\u0000\u053c"+ + "\u053d\u0001\u0000\u0000\u0000\u053d\u053e\u0006\u009c\u0019\u0000\u053e"+ + "\u0149\u0001\u0000\u0000\u0000\u053f\u0540\u0003\u00bcV\u0000\u0540\u0541"+ + "\u0001\u0000\u0000\u0000\u0541\u0542\u0006\u009d\u001f\u0000\u0542\u014b"+ + "\u0001\u0000\u0000\u0000\u0543\u0544\u0003\u00b8T\u0000\u0544\u0545\u0001"+ + "\u0000\u0000\u0000\u0545\u0546\u0006\u009e \u0000\u0546\u014d\u0001\u0000"+ + "\u0000\u0000\u0547\u0548\u0003B\u0019\u0000\u0548\u0549\u0001\u0000\u0000"+ + "\u0000\u0549\u054a\u0006\u009f\u000b\u0000\u054a\u014f\u0001\u0000\u0000"+ + "\u0000\u054b\u054c\u0003D\u001a\u0000\u054c\u054d\u0001\u0000\u0000\u0000"+ + "\u054d\u054e\u0006\u00a0\u000b\u0000\u054e\u0151\u0001\u0000\u0000\u0000"+ + "\u054f\u0550\u0003F\u001b\u0000\u0550\u0551\u0001\u0000\u0000\u0000\u0551"+ + "\u0552\u0006\u00a1\u000b\u0000\u0552\u0153\u0001\u0000\u0000\u0000\u0553"+ + "\u0554\u0003H\u001c\u0000\u0554\u0555\u0001\u0000\u0000\u0000\u0555\u0556"+ + "\u0006\u00a2\u0010\u0000\u0556\u0557\u0006\u00a2\f\u0000\u0557\u0155\u0001"+ + "\u0000\u0000\u0000\u0558\u0559\u0007\u0001\u0000\u0000\u0559\u055a\u0007"+ + "\t\u0000\u0000\u055a\u055b\u0007\u000f\u0000\u0000\u055b\u055c\u0007\u0007"+ + "\u0000\u0000\u055c\u0157\u0001\u0000\u0000\u0000\u055d\u055e\u0003B\u0019"+ + "\u0000\u055e\u055f\u0001\u0000\u0000\u0000\u055f\u0560\u0006\u00a4\u000b"+ + "\u0000\u0560\u0159\u0001\u0000\u0000\u0000\u0561\u0562\u0003D\u001a\u0000"+ + "\u0562\u0563\u0001\u0000\u0000\u0000\u0563\u0564\u0006\u00a5\u000b\u0000"+ + "\u0564\u015b\u0001\u0000\u0000\u0000\u0565\u0566\u0003F\u001b\u0000\u0566"+ + "\u0567\u0001\u0000\u0000\u0000\u0567\u0568\u0006\u00a6\u000b\u0000\u0568"+ + "\u015d\u0001\u0000\u0000\u0000\u0569\u056a\u0003\u00b6S\u0000\u056a\u056b"+ + "\u0001\u0000\u0000\u0000\u056b\u056c\u0006\u00a7\u0011\u0000\u056c\u056d"+ + 
"\u0006\u00a7\f\u0000\u056d\u015f\u0001\u0000\u0000\u0000\u056e\u056f\u0003"+ + "n/\u0000\u056f\u0570\u0001\u0000\u0000\u0000\u0570\u0571\u0006\u00a8\u0012"+ + "\u0000\u0571\u0161\u0001\u0000\u0000\u0000\u0572\u0578\u0003T\"\u0000"+ + "\u0573\u0578\u0003J\u001d\u0000\u0574\u0578\u0003t2\u0000\u0575\u0578"+ + "\u0003L\u001e\u0000\u0576\u0578\u0003Z%\u0000\u0577\u0572\u0001\u0000"+ + "\u0000\u0000\u0577\u0573\u0001\u0000\u0000\u0000\u0577\u0574\u0001\u0000"+ + "\u0000\u0000\u0577\u0575\u0001\u0000\u0000\u0000\u0577\u0576\u0001\u0000"+ + "\u0000\u0000\u0578\u0579\u0001\u0000\u0000\u0000\u0579\u0577\u0001\u0000"+ + "\u0000\u0000\u0579\u057a\u0001\u0000\u0000\u0000\u057a\u0163\u0001\u0000"+ + "\u0000\u0000\u057b\u057c\u0003B\u0019\u0000\u057c\u057d\u0001\u0000\u0000"+ + "\u0000\u057d\u057e\u0006\u00aa\u000b\u0000\u057e\u0165\u0001\u0000\u0000"+ + "\u0000\u057f\u0580\u0003D\u001a\u0000\u0580\u0581\u0001\u0000\u0000\u0000"+ + "\u0581\u0582\u0006\u00ab\u000b\u0000\u0582\u0167\u0001\u0000\u0000\u0000"+ + "\u0583\u0584\u0003F\u001b\u0000\u0584\u0585\u0001\u0000\u0000\u0000\u0585"+ + "\u0586\u0006\u00ac\u000b\u0000\u0586\u0169\u0001\u0000\u0000\u0000\u0587"+ + "\u0588\u0003H\u001c\u0000\u0588\u0589\u0001\u0000\u0000\u0000\u0589\u058a"+ + "\u0006\u00ad\u0010\u0000\u058a\u058b\u0006\u00ad\f\u0000\u058b\u016b\u0001"+ + "\u0000\u0000\u0000\u058c\u058d\u0003n/\u0000\u058d\u058e\u0001\u0000\u0000"+ + "\u0000\u058e\u058f\u0006\u00ae\u0012\u0000\u058f\u016d\u0001\u0000\u0000"+ + "\u0000\u0590\u0591\u0003p0\u0000\u0591\u0592\u0001\u0000\u0000\u0000\u0592"+ + "\u0593\u0006\u00af\u0013\u0000\u0593\u016f\u0001\u0000\u0000\u0000\u0594"+ + "\u0595\u0003t2\u0000\u0595\u0596\u0001\u0000\u0000\u0000\u0596\u0597\u0006"+ + "\u00b0\u0017\u0000\u0597\u0171\u0001\u0000\u0000\u0000\u0598\u0599\u0003"+ + "\u011a\u0085\u0000\u0599\u059a\u0001\u0000\u0000\u0000\u059a\u059b\u0006"+ + "\u00b1!\u0000\u059b\u059c\u0006\u00b1\"\u0000\u059c\u0173\u0001\u0000"+ + 
"\u0000\u0000\u059d\u059e\u0003\u00deg\u0000\u059e\u059f\u0001\u0000\u0000"+ + "\u0000\u059f\u05a0\u0006\u00b2\u0015\u0000\u05a0\u0175\u0001\u0000\u0000"+ + "\u0000\u05a1\u05a2\u0003^\'\u0000\u05a2\u05a3\u0001\u0000\u0000\u0000"+ + "\u05a3\u05a4\u0006\u00b3\u0016\u0000\u05a4\u0177\u0001\u0000\u0000\u0000"+ + "\u05a5\u05a6\u0003B\u0019\u0000\u05a6\u05a7\u0001\u0000\u0000\u0000\u05a7"+ + "\u05a8\u0006\u00b4\u000b\u0000\u05a8\u0179\u0001\u0000\u0000\u0000\u05a9"+ + "\u05aa\u0003D\u001a\u0000\u05aa\u05ab\u0001\u0000\u0000\u0000\u05ab\u05ac"+ + "\u0006\u00b5\u000b\u0000\u05ac\u017b\u0001\u0000\u0000\u0000\u05ad\u05ae"+ + "\u0003F\u001b\u0000\u05ae\u05af\u0001\u0000\u0000\u0000\u05af\u05b0\u0006"+ + "\u00b6\u000b\u0000\u05b0\u017d\u0001\u0000\u0000\u0000\u05b1\u05b2\u0003"+ + "H\u001c\u0000\u05b2\u05b3\u0001\u0000\u0000\u0000\u05b3\u05b4\u0006\u00b7"+ + "\u0010\u0000\u05b4\u05b5\u0006\u00b7\f\u0000\u05b5\u05b6\u0006\u00b7\f"+ + "\u0000\u05b6\u017f\u0001\u0000\u0000\u0000\u05b7\u05b8\u0003p0\u0000\u05b8"+ + "\u05b9\u0001\u0000\u0000\u0000\u05b9\u05ba\u0006\u00b8\u0013\u0000\u05ba"+ + "\u0181\u0001\u0000\u0000\u0000\u05bb\u05bc\u0003t2\u0000\u05bc\u05bd\u0001"+ + "\u0000\u0000\u0000\u05bd\u05be\u0006\u00b9\u0017\u0000\u05be\u0183\u0001"+ + "\u0000\u0000\u0000\u05bf\u05c0\u0003\u00f8t\u0000\u05c0\u05c1\u0001\u0000"+ + "\u0000\u0000\u05c1\u05c2\u0006\u00ba\u001a\u0000\u05c2\u0185\u0001\u0000"+ + "\u0000\u0000\u05c3\u05c4\u0003B\u0019\u0000\u05c4\u05c5\u0001\u0000\u0000"+ + "\u0000\u05c5\u05c6\u0006\u00bb\u000b\u0000\u05c6\u0187\u0001\u0000\u0000"+ + "\u0000\u05c7\u05c8\u0003D\u001a\u0000\u05c8\u05c9\u0001\u0000\u0000\u0000"+ + "\u05c9\u05ca\u0006\u00bc\u000b\u0000\u05ca\u0189\u0001\u0000\u0000\u0000"+ + "\u05cb\u05cc\u0003F\u001b\u0000\u05cc\u05cd\u0001\u0000\u0000\u0000\u05cd"+ + "\u05ce\u0006\u00bd\u000b\u0000\u05ce\u018b\u0001\u0000\u0000\u0000\u05cf"+ + "\u05d0\u0003H\u001c\u0000\u05d0\u05d1\u0001\u0000\u0000\u0000\u05d1\u05d2"+ + 
"\u0006\u00be\u0010\u0000\u05d2\u05d3\u0006\u00be\f\u0000\u05d3\u018d\u0001"+ + "\u0000\u0000\u0000\u05d4\u05d5\u00036\u0013\u0000\u05d5\u05d6\u0001\u0000"+ + "\u0000\u0000\u05d6\u05d7\u0006\u00bf#\u0000\u05d7\u018f\u0001\u0000\u0000"+ + "\u0000\u05d8\u05d9\u0003\u010c~\u0000\u05d9\u05da\u0001\u0000\u0000\u0000"+ + "\u05da\u05db\u0006\u00c0$\u0000\u05db\u0191\u0001\u0000\u0000\u0000\u05dc"+ + "\u05dd\u0003\u011a\u0085\u0000\u05dd\u05de\u0001\u0000\u0000\u0000\u05de"+ + "\u05df\u0006\u00c1!\u0000\u05df\u05e0\u0006\u00c1\f\u0000\u05e0\u05e1"+ + "\u0006\u00c1\u0000\u0000\u05e1\u0193\u0001\u0000\u0000\u0000\u05e2\u05e3"+ + "\u0007\u0014\u0000\u0000\u05e3\u05e4\u0007\u0002\u0000\u0000\u05e4\u05e5"+ + "\u0007\u0001\u0000\u0000\u05e5\u05e6\u0007\t\u0000\u0000\u05e6\u05e7\u0007"+ + "\u0011\u0000\u0000\u05e7\u05e8\u0001\u0000\u0000\u0000\u05e8\u05e9\u0006"+ + "\u00c2\f\u0000\u05e9\u05ea\u0006\u00c2\u0000\u0000\u05ea\u0195\u0001\u0000"+ + "\u0000\u0000\u05eb\u05ec\u0003\u00b8T\u0000\u05ec\u05ed\u0001\u0000\u0000"+ + "\u0000\u05ed\u05ee\u0006\u00c3 \u0000\u05ee\u0197\u0001\u0000\u0000\u0000"+ + "\u05ef\u05f0\u0003\u00bcV\u0000\u05f0\u05f1\u0001\u0000\u0000\u0000\u05f1"+ + "\u05f2\u0006\u00c4\u001f\u0000\u05f2\u0199\u0001\u0000\u0000\u0000\u05f3"+ + "\u05f4\u0003B\u0019\u0000\u05f4\u05f5\u0001\u0000\u0000\u0000\u05f5\u05f6"+ + "\u0006\u00c5\u000b\u0000\u05f6\u019b\u0001\u0000\u0000\u0000\u05f7\u05f8"+ + "\u0003D\u001a\u0000\u05f8\u05f9\u0001\u0000\u0000\u0000\u05f9\u05fa\u0006"+ + "\u00c6\u000b\u0000\u05fa\u019d\u0001\u0000\u0000\u0000\u05fb\u05fc\u0003"+ + "F\u001b\u0000\u05fc\u05fd\u0001\u0000\u0000\u0000\u05fd\u05fe\u0006\u00c7"+ + "\u000b\u0000\u05fe\u019f\u0001\u0000\u0000\u0000\u05ff\u0600\u0003H\u001c"+ + "\u0000\u0600\u0601\u0001\u0000\u0000\u0000\u0601\u0602\u0006\u00c8\u0010"+ + "\u0000\u0602\u0603\u0006\u00c8\f\u0000\u0603\u01a1\u0001\u0000\u0000\u0000"+ + "\u0604\u0605\u0003\u00deg\u0000\u0605\u0606\u0001\u0000\u0000\u0000\u0606"+ + 
"\u0607\u0006\u00c9\u0015\u0000\u0607\u0608\u0006\u00c9\f\u0000\u0608\u0609"+ + "\u0006\u00c9%\u0000\u0609\u01a3\u0001\u0000\u0000\u0000\u060a\u060b\u0003"+ + "^\'\u0000\u060b\u060c\u0001\u0000\u0000\u0000\u060c\u060d\u0006\u00ca"+ + "\u0016\u0000\u060d\u060e\u0006\u00ca\f\u0000\u060e\u060f\u0006\u00ca%"+ + "\u0000\u060f\u01a5\u0001\u0000\u0000\u0000\u0610\u0611\u0003B\u0019\u0000"+ + "\u0611\u0612\u0001\u0000\u0000\u0000\u0612\u0613\u0006\u00cb\u000b\u0000"+ + "\u0613\u01a7\u0001\u0000\u0000\u0000\u0614\u0615\u0003D\u001a\u0000\u0615"+ + "\u0616\u0001\u0000\u0000\u0000\u0616\u0617\u0006\u00cc\u000b\u0000\u0617"+ + "\u01a9\u0001\u0000\u0000\u0000\u0618\u0619\u0003F\u001b\u0000\u0619\u061a"+ + "\u0001\u0000\u0000\u0000\u061a\u061b\u0006\u00cd\u000b\u0000\u061b\u01ab"+ + "\u0001\u0000\u0000\u0000\u061c\u061d\u0003n/\u0000\u061d\u061e\u0001\u0000"+ + "\u0000\u0000\u061e\u061f\u0006\u00ce\u0012\u0000\u061f\u0620\u0006\u00ce"+ + "\f\u0000\u0620\u0621\u0006\u00ce\t\u0000\u0621\u01ad\u0001\u0000\u0000"+ + "\u0000\u0622\u0623\u0003p0\u0000\u0623\u0624\u0001\u0000\u0000\u0000\u0624"+ + "\u0625\u0006\u00cf\u0013\u0000\u0625\u0626\u0006\u00cf\f\u0000\u0626\u0627"+ + "\u0006\u00cf\t\u0000\u0627\u01af\u0001\u0000\u0000\u0000\u0628\u0629\u0003"+ + "B\u0019\u0000\u0629\u062a\u0001\u0000\u0000\u0000\u062a\u062b\u0006\u00d0"+ + "\u000b\u0000\u062b\u01b1\u0001\u0000\u0000\u0000\u062c\u062d\u0003D\u001a"+ + "\u0000\u062d\u062e\u0001\u0000\u0000\u0000\u062e\u062f\u0006\u00d1\u000b"+ + "\u0000\u062f\u01b3\u0001\u0000\u0000\u0000\u0630\u0631\u0003F\u001b\u0000"+ + "\u0631\u0632\u0001\u0000\u0000\u0000\u0632\u0633\u0006\u00d2\u000b\u0000"+ + "\u0633\u01b5\u0001\u0000\u0000\u0000\u0634\u0635\u0003\u00bcV\u0000\u0635"+ + "\u0636\u0001\u0000\u0000\u0000\u0636\u0637\u0006\u00d3\f\u0000\u0637\u0638"+ + "\u0006\u00d3\u0000\u0000\u0638\u0639\u0006\u00d3\u001f\u0000\u0639\u01b7"+ + "\u0001\u0000\u0000\u0000\u063a\u063b\u0003\u00b8T\u0000\u063b\u063c\u0001"+ + 
"\u0000\u0000\u0000\u063c\u063d\u0006\u00d4\f\u0000\u063d\u063e\u0006\u00d4"+ + "\u0000\u0000\u063e\u063f\u0006\u00d4 \u0000\u063f\u01b9\u0001\u0000\u0000"+ + "\u0000\u0640\u0641\u0003d*\u0000\u0641\u0642\u0001\u0000\u0000\u0000\u0642"+ + "\u0643\u0006\u00d5\f\u0000\u0643\u0644\u0006\u00d5\u0000\u0000\u0644\u0645"+ + "\u0006\u00d5&\u0000\u0645\u01bb\u0001\u0000\u0000\u0000\u0646\u0647\u0003"+ + "H\u001c\u0000\u0647\u0648\u0001\u0000\u0000\u0000\u0648\u0649\u0006\u00d6"+ + "\u0010\u0000\u0649\u064a\u0006\u00d6\f\u0000\u064a\u01bd\u0001\u0000\u0000"+ "\u0000B\u0000\u0001\u0002\u0003\u0004\u0005\u0006\u0007\b\t\n\u000b\f"+ - "\r\u000e\u000f\u028f\u0299\u029d\u02a0\u02a9\u02ab\u02b6\u02c9\u02ce\u02d7"+ - "\u02de\u02e3\u02e5\u02f0\u02f8\u02fb\u02fd\u0302\u0307\u030d\u0314\u0319"+ - "\u031f\u0322\u032a\u032e\u03ae\u03b3\u03ba\u03bc\u03cc\u03d1\u03d6\u03d8"+ - "\u03de\u042b\u0430\u0461\u0465\u046a\u046f\u0474\u0476\u047a\u047c\u04d3"+ - "\u04d7\u04dc\u056d\u056f\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ + "\r\u000e\u000f\u0293\u029d\u02a1\u02a4\u02ad\u02af\u02ba\u02cd\u02d2\u02db"+ + "\u02e2\u02e7\u02e9\u02f4\u02fc\u02ff\u0301\u0306\u030b\u0311\u0318\u031d"+ + "\u0323\u0326\u032e\u0332\u03b8\u03bd\u03c4\u03c6\u03d6\u03db\u03e0\u03e2"+ + "\u03e8\u0435\u043a\u046b\u046f\u0474\u0479\u047e\u0480\u0484\u0486\u04dd"+ + "\u04e1\u04e6\u0577\u0579\'\u0005\u0001\u0000\u0005\u0004\u0000\u0005\u0006"+ "\u0000\u0005\u0002\u0000\u0005\u0003\u0000\u0005\b\u0000\u0005\u0005\u0000"+ "\u0005\t\u0000\u0005\u000b\u0000\u0005\u000e\u0000\u0005\r\u0000\u0000"+ - "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007F\u0000\u0005\u0000"+ - "\u0000\u0007\u001d\u0000\u0007G\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ - "$\u0000\u0007Q\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ - "E\u0000\u0007U\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007_\u0000\u0007"+ - "^\u0000\u0007I\u0000\u0007H\u0000\u0007]\u0000\u0005\f\u0000\u0007\u0014"+ - 
"\u0000\u0007Y\u0000\u0005\u000f\u0000\u0007!\u0000"; + "\u0001\u0000\u0004\u0000\u0000\u0007\u0010\u0000\u0007H\u0000\u0005\u0000"+ + "\u0000\u0007\u001d\u0000\u0007I\u0000\u0007&\u0000\u0007\'\u0000\u0007"+ + "$\u0000\u0007S\u0000\u0007\u001e\u0000\u0007)\u0000\u00075\u0000\u0007"+ + "G\u0000\u0007W\u0000\u0005\n\u0000\u0005\u0007\u0000\u0007a\u0000\u0007"+ + "`\u0000\u0007K\u0000\u0007J\u0000\u0007_\u0000\u0005\f\u0000\u0007\u0014"+ + "\u0000\u0007[\u0000\u0005\u000f\u0000\u0007!\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 492df7fbc1608..c9db129e08ba2 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -70,6 +70,8 @@ null '%' null null +null +null ']' null null @@ -199,6 +201,8 @@ MINUS ASTERISK SLASH PERCENT +LEFT_BRACES +RIGHT_BRACES NAMED_OR_POSITIONAL_PARAM OPENING_BRACKET CLOSING_BRACKET @@ -274,6 +278,8 @@ operatorExpression primaryExpression functionExpression functionName +mapExpression +entryExpression dataType rowCommand fields @@ -328,4 +334,4 @@ joinPredicate atn: -[4, 1, 128, 627, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 
41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 138, 8, 1, 10, 1, 12, 1, 141, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 149, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 169, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 181, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 188, 8, 5, 10, 5, 12, 5, 191, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 198, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 203, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 211, 8, 5, 10, 5, 12, 5, 214, 9, 5, 1, 6, 1, 6, 3, 6, 218, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 225, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 230, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 235, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 245, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 251, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 259, 8, 9, 10, 9, 12, 9, 262, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 272, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 277, 8, 10, 10, 10, 12, 10, 280, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 288, 8, 11, 10, 11, 12, 11, 291, 9, 11, 3, 11, 293, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 5, 15, 307, 8, 15, 10, 15, 12, 15, 310, 9, 15, 1, 16, 1, 16, 1, 16, 3, 16, 315, 8, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 5, 17, 323, 8, 17, 10, 17, 12, 17, 326, 9, 17, 1, 17, 3, 17, 329, 8, 17, 1, 18, 1, 18, 1, 18, 3, 18, 334, 8, 18, 1, 18, 1, 18, 1, 19, 1, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 1, 21, 5, 21, 346, 8, 21, 10, 21, 12, 21, 349, 9, 21, 1, 22, 1, 22, 1, 22, 1, 22, 5, 22, 355, 8, 22, 10, 22, 12, 22, 358, 9, 22, 1, 
22, 3, 22, 361, 8, 22, 1, 22, 1, 22, 3, 22, 365, 8, 22, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 3, 24, 372, 8, 24, 1, 24, 1, 24, 3, 24, 376, 8, 24, 1, 25, 1, 25, 1, 25, 5, 25, 381, 8, 25, 10, 25, 12, 25, 384, 9, 25, 1, 26, 1, 26, 1, 26, 3, 26, 389, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 394, 8, 27, 10, 27, 12, 27, 397, 9, 27, 1, 28, 1, 28, 1, 28, 5, 28, 402, 8, 28, 10, 28, 12, 28, 405, 9, 28, 1, 29, 1, 29, 1, 29, 5, 29, 410, 8, 29, 10, 29, 12, 29, 413, 9, 29, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 3, 31, 420, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 435, 8, 32, 10, 32, 12, 32, 438, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 446, 8, 32, 10, 32, 12, 32, 449, 9, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 1, 32, 5, 32, 457, 8, 32, 10, 32, 12, 32, 460, 9, 32, 1, 32, 1, 32, 3, 32, 464, 8, 32, 1, 33, 1, 33, 3, 33, 468, 8, 33, 1, 34, 1, 34, 1, 34, 3, 34, 473, 8, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 1, 36, 5, 36, 482, 8, 36, 10, 36, 12, 36, 485, 9, 36, 1, 37, 1, 37, 3, 37, 489, 8, 37, 1, 37, 1, 37, 3, 37, 493, 8, 37, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 40, 1, 40, 5, 40, 505, 8, 40, 10, 40, 12, 40, 508, 9, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 3, 42, 518, 8, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 5, 45, 530, 8, 45, 10, 45, 12, 45, 533, 9, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 3, 48, 543, 8, 48, 1, 49, 3, 49, 546, 8, 49, 1, 49, 1, 49, 1, 50, 3, 50, 551, 8, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 3, 56, 573, 8, 56, 1, 56, 1, 56, 1, 56, 1, 56, 5, 56, 579, 8, 56, 10, 56, 12, 56, 582, 9, 56, 3, 56, 584, 8, 56, 1, 57, 1, 57, 1, 57, 3, 57, 589, 8, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 3, 59, 602, 8, 59, 1, 60, 3, 60, 605, 8, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 
1, 61, 3, 61, 614, 8, 61, 1, 62, 1, 62, 1, 62, 1, 62, 5, 62, 620, 8, 62, 10, 62, 12, 62, 623, 9, 62, 1, 63, 1, 63, 1, 63, 0, 4, 2, 10, 18, 20, 64, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 81, 81, 1, 0, 72, 73, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 654, 0, 128, 1, 0, 0, 0, 2, 131, 1, 0, 0, 0, 4, 148, 1, 0, 0, 0, 6, 168, 1, 0, 0, 0, 8, 170, 1, 0, 0, 0, 10, 202, 1, 0, 0, 0, 12, 229, 1, 0, 0, 0, 14, 231, 1, 0, 0, 0, 16, 244, 1, 0, 0, 0, 18, 250, 1, 0, 0, 0, 20, 271, 1, 0, 0, 0, 22, 281, 1, 0, 0, 0, 24, 296, 1, 0, 0, 0, 26, 298, 1, 0, 0, 0, 28, 300, 1, 0, 0, 0, 30, 303, 1, 0, 0, 0, 32, 314, 1, 0, 0, 0, 34, 318, 1, 0, 0, 0, 36, 333, 1, 0, 0, 0, 38, 337, 1, 0, 0, 0, 40, 339, 1, 0, 0, 0, 42, 341, 1, 0, 0, 0, 44, 350, 1, 0, 0, 0, 46, 366, 1, 0, 0, 0, 48, 369, 1, 0, 0, 0, 50, 377, 1, 0, 0, 0, 52, 385, 1, 0, 0, 0, 54, 390, 1, 0, 0, 0, 56, 398, 1, 0, 0, 0, 58, 406, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 419, 1, 0, 0, 0, 64, 463, 1, 0, 0, 0, 66, 467, 1, 0, 0, 0, 68, 472, 1, 0, 0, 0, 70, 474, 1, 0, 0, 0, 72, 477, 1, 0, 0, 0, 74, 486, 1, 0, 0, 0, 76, 494, 1, 0, 0, 0, 78, 497, 1, 0, 0, 0, 80, 500, 1, 0, 0, 0, 82, 509, 1, 0, 0, 0, 84, 513, 1, 0, 0, 0, 86, 519, 1, 0, 0, 0, 88, 523, 1, 0, 0, 0, 90, 526, 1, 0, 0, 0, 92, 534, 1, 0, 0, 0, 94, 538, 1, 0, 0, 0, 96, 542, 1, 0, 0, 0, 98, 545, 1, 0, 0, 0, 100, 550, 1, 0, 0, 0, 102, 554, 1, 0, 0, 0, 104, 556, 1, 0, 0, 0, 106, 558, 1, 0, 0, 0, 108, 561, 1, 0, 0, 0, 110, 565, 1, 0, 0, 0, 112, 568, 1, 0, 0, 0, 114, 588, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 597, 1, 0, 0, 0, 120, 604, 1, 0, 0, 0, 122, 610, 1, 0, 0, 0, 124, 615, 1, 0, 0, 0, 126, 624, 1, 0, 0, 0, 128, 129, 3, 2, 1, 0, 129, 130, 5, 0, 0, 1, 130, 1, 1, 0, 
0, 0, 131, 132, 6, 1, -1, 0, 132, 133, 3, 4, 2, 0, 133, 139, 1, 0, 0, 0, 134, 135, 10, 1, 0, 0, 135, 136, 5, 29, 0, 0, 136, 138, 3, 6, 3, 0, 137, 134, 1, 0, 0, 0, 138, 141, 1, 0, 0, 0, 139, 137, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 3, 1, 0, 0, 0, 141, 139, 1, 0, 0, 0, 142, 149, 3, 106, 53, 0, 143, 149, 3, 34, 17, 0, 144, 149, 3, 28, 14, 0, 145, 149, 3, 110, 55, 0, 146, 147, 4, 2, 1, 0, 147, 149, 3, 44, 22, 0, 148, 142, 1, 0, 0, 0, 148, 143, 1, 0, 0, 0, 148, 144, 1, 0, 0, 0, 148, 145, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 149, 5, 1, 0, 0, 0, 150, 169, 3, 46, 23, 0, 151, 169, 3, 8, 4, 0, 152, 169, 3, 76, 38, 0, 153, 169, 3, 70, 35, 0, 154, 169, 3, 48, 24, 0, 155, 169, 3, 72, 36, 0, 156, 169, 3, 78, 39, 0, 157, 169, 3, 80, 40, 0, 158, 169, 3, 84, 42, 0, 159, 169, 3, 86, 43, 0, 160, 169, 3, 112, 56, 0, 161, 169, 3, 88, 44, 0, 162, 163, 4, 3, 2, 0, 163, 169, 3, 118, 59, 0, 164, 165, 4, 3, 3, 0, 165, 169, 3, 116, 58, 0, 166, 167, 4, 3, 4, 0, 167, 169, 3, 120, 60, 0, 168, 150, 1, 0, 0, 0, 168, 151, 1, 0, 0, 0, 168, 152, 1, 0, 0, 0, 168, 153, 1, 0, 0, 0, 168, 154, 1, 0, 0, 0, 168, 155, 1, 0, 0, 0, 168, 156, 1, 0, 0, 0, 168, 157, 1, 0, 0, 0, 168, 158, 1, 0, 0, 0, 168, 159, 1, 0, 0, 0, 168, 160, 1, 0, 0, 0, 168, 161, 1, 0, 0, 0, 168, 162, 1, 0, 0, 0, 168, 164, 1, 0, 0, 0, 168, 166, 1, 0, 0, 0, 169, 7, 1, 0, 0, 0, 170, 171, 5, 16, 0, 0, 171, 172, 3, 10, 5, 0, 172, 9, 1, 0, 0, 0, 173, 174, 6, 5, -1, 0, 174, 175, 5, 49, 0, 0, 175, 203, 3, 10, 5, 8, 176, 203, 3, 16, 8, 0, 177, 203, 3, 12, 6, 0, 178, 180, 3, 16, 8, 0, 179, 181, 5, 49, 0, 0, 180, 179, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 1, 0, 0, 0, 182, 183, 5, 44, 0, 0, 183, 184, 5, 48, 0, 0, 184, 189, 3, 16, 8, 0, 185, 186, 5, 39, 0, 0, 186, 188, 3, 16, 8, 0, 187, 185, 1, 0, 0, 0, 188, 191, 1, 0, 0, 0, 189, 187, 1, 0, 0, 0, 189, 190, 1, 0, 0, 0, 190, 192, 1, 0, 0, 0, 191, 189, 1, 0, 0, 0, 192, 193, 5, 55, 0, 0, 193, 203, 1, 0, 0, 0, 194, 195, 3, 16, 8, 0, 195, 197, 5, 45, 0, 0, 196, 198, 5, 49, 0, 0, 197, 196, 1, 0, 0, 0, 
197, 198, 1, 0, 0, 0, 198, 199, 1, 0, 0, 0, 199, 200, 5, 50, 0, 0, 200, 203, 1, 0, 0, 0, 201, 203, 3, 14, 7, 0, 202, 173, 1, 0, 0, 0, 202, 176, 1, 0, 0, 0, 202, 177, 1, 0, 0, 0, 202, 178, 1, 0, 0, 0, 202, 194, 1, 0, 0, 0, 202, 201, 1, 0, 0, 0, 203, 212, 1, 0, 0, 0, 204, 205, 10, 5, 0, 0, 205, 206, 5, 34, 0, 0, 206, 211, 3, 10, 5, 6, 207, 208, 10, 4, 0, 0, 208, 209, 5, 52, 0, 0, 209, 211, 3, 10, 5, 5, 210, 204, 1, 0, 0, 0, 210, 207, 1, 0, 0, 0, 211, 214, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 212, 213, 1, 0, 0, 0, 213, 11, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 215, 217, 3, 16, 8, 0, 216, 218, 5, 49, 0, 0, 217, 216, 1, 0, 0, 0, 217, 218, 1, 0, 0, 0, 218, 219, 1, 0, 0, 0, 219, 220, 5, 47, 0, 0, 220, 221, 3, 102, 51, 0, 221, 230, 1, 0, 0, 0, 222, 224, 3, 16, 8, 0, 223, 225, 5, 49, 0, 0, 224, 223, 1, 0, 0, 0, 224, 225, 1, 0, 0, 0, 225, 226, 1, 0, 0, 0, 226, 227, 5, 54, 0, 0, 227, 228, 3, 102, 51, 0, 228, 230, 1, 0, 0, 0, 229, 215, 1, 0, 0, 0, 229, 222, 1, 0, 0, 0, 230, 13, 1, 0, 0, 0, 231, 234, 3, 54, 27, 0, 232, 233, 5, 37, 0, 0, 233, 235, 3, 26, 13, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 236, 1, 0, 0, 0, 236, 237, 5, 38, 0, 0, 237, 238, 3, 64, 32, 0, 238, 15, 1, 0, 0, 0, 239, 245, 3, 18, 9, 0, 240, 241, 3, 18, 9, 0, 241, 242, 3, 104, 52, 0, 242, 243, 3, 18, 9, 0, 243, 245, 1, 0, 0, 0, 244, 239, 1, 0, 0, 0, 244, 240, 1, 0, 0, 0, 245, 17, 1, 0, 0, 0, 246, 247, 6, 9, -1, 0, 247, 251, 3, 20, 10, 0, 248, 249, 7, 0, 0, 0, 249, 251, 3, 18, 9, 3, 250, 246, 1, 0, 0, 0, 250, 248, 1, 0, 0, 0, 251, 260, 1, 0, 0, 0, 252, 253, 10, 2, 0, 0, 253, 254, 7, 1, 0, 0, 254, 259, 3, 18, 9, 3, 255, 256, 10, 1, 0, 0, 256, 257, 7, 0, 0, 0, 257, 259, 3, 18, 9, 2, 258, 252, 1, 0, 0, 0, 258, 255, 1, 0, 0, 0, 259, 262, 1, 0, 0, 0, 260, 258, 1, 0, 0, 0, 260, 261, 1, 0, 0, 0, 261, 19, 1, 0, 0, 0, 262, 260, 1, 0, 0, 0, 263, 264, 6, 10, -1, 0, 264, 272, 3, 64, 32, 0, 265, 272, 3, 54, 27, 0, 266, 272, 3, 22, 11, 0, 267, 268, 5, 48, 0, 0, 268, 269, 3, 10, 5, 0, 269, 270, 5, 55, 0, 0, 270, 272, 1, 0, 
0, 0, 271, 263, 1, 0, 0, 0, 271, 265, 1, 0, 0, 0, 271, 266, 1, 0, 0, 0, 271, 267, 1, 0, 0, 0, 272, 278, 1, 0, 0, 0, 273, 274, 10, 1, 0, 0, 274, 275, 5, 37, 0, 0, 275, 277, 3, 26, 13, 0, 276, 273, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 21, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 3, 24, 12, 0, 282, 292, 5, 48, 0, 0, 283, 293, 5, 66, 0, 0, 284, 289, 3, 10, 5, 0, 285, 286, 5, 39, 0, 0, 286, 288, 3, 10, 5, 0, 287, 285, 1, 0, 0, 0, 288, 291, 1, 0, 0, 0, 289, 287, 1, 0, 0, 0, 289, 290, 1, 0, 0, 0, 290, 293, 1, 0, 0, 0, 291, 289, 1, 0, 0, 0, 292, 283, 1, 0, 0, 0, 292, 284, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 295, 5, 55, 0, 0, 295, 23, 1, 0, 0, 0, 296, 297, 3, 68, 34, 0, 297, 25, 1, 0, 0, 0, 298, 299, 3, 60, 30, 0, 299, 27, 1, 0, 0, 0, 300, 301, 5, 12, 0, 0, 301, 302, 3, 30, 15, 0, 302, 29, 1, 0, 0, 0, 303, 308, 3, 32, 16, 0, 304, 305, 5, 39, 0, 0, 305, 307, 3, 32, 16, 0, 306, 304, 1, 0, 0, 0, 307, 310, 1, 0, 0, 0, 308, 306, 1, 0, 0, 0, 308, 309, 1, 0, 0, 0, 309, 31, 1, 0, 0, 0, 310, 308, 1, 0, 0, 0, 311, 312, 3, 54, 27, 0, 312, 313, 5, 36, 0, 0, 313, 315, 1, 0, 0, 0, 314, 311, 1, 0, 0, 0, 314, 315, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 317, 3, 10, 5, 0, 317, 33, 1, 0, 0, 0, 318, 319, 5, 6, 0, 0, 319, 324, 3, 36, 18, 0, 320, 321, 5, 39, 0, 0, 321, 323, 3, 36, 18, 0, 322, 320, 1, 0, 0, 0, 323, 326, 1, 0, 0, 0, 324, 322, 1, 0, 0, 0, 324, 325, 1, 0, 0, 0, 325, 328, 1, 0, 0, 0, 326, 324, 1, 0, 0, 0, 327, 329, 3, 42, 21, 0, 328, 327, 1, 0, 0, 0, 328, 329, 1, 0, 0, 0, 329, 35, 1, 0, 0, 0, 330, 331, 3, 38, 19, 0, 331, 332, 5, 38, 0, 0, 332, 334, 1, 0, 0, 0, 333, 330, 1, 0, 0, 0, 333, 334, 1, 0, 0, 0, 334, 335, 1, 0, 0, 0, 335, 336, 3, 40, 20, 0, 336, 37, 1, 0, 0, 0, 337, 338, 5, 81, 0, 0, 338, 39, 1, 0, 0, 0, 339, 340, 7, 2, 0, 0, 340, 41, 1, 0, 0, 0, 341, 342, 5, 80, 0, 0, 342, 347, 5, 81, 0, 0, 343, 344, 5, 39, 0, 0, 344, 346, 5, 81, 0, 0, 345, 343, 1, 0, 0, 0, 346, 349, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 
347, 348, 1, 0, 0, 0, 348, 43, 1, 0, 0, 0, 349, 347, 1, 0, 0, 0, 350, 351, 5, 19, 0, 0, 351, 356, 3, 36, 18, 0, 352, 353, 5, 39, 0, 0, 353, 355, 3, 36, 18, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 361, 3, 50, 25, 0, 360, 359, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 364, 1, 0, 0, 0, 362, 363, 5, 33, 0, 0, 363, 365, 3, 30, 15, 0, 364, 362, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 45, 1, 0, 0, 0, 366, 367, 5, 4, 0, 0, 367, 368, 3, 30, 15, 0, 368, 47, 1, 0, 0, 0, 369, 371, 5, 15, 0, 0, 370, 372, 3, 50, 25, 0, 371, 370, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 375, 1, 0, 0, 0, 373, 374, 5, 33, 0, 0, 374, 376, 3, 30, 15, 0, 375, 373, 1, 0, 0, 0, 375, 376, 1, 0, 0, 0, 376, 49, 1, 0, 0, 0, 377, 382, 3, 52, 26, 0, 378, 379, 5, 39, 0, 0, 379, 381, 3, 52, 26, 0, 380, 378, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 382, 383, 1, 0, 0, 0, 383, 51, 1, 0, 0, 0, 384, 382, 1, 0, 0, 0, 385, 388, 3, 32, 16, 0, 386, 387, 5, 16, 0, 0, 387, 389, 3, 10, 5, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 53, 1, 0, 0, 0, 390, 395, 3, 68, 34, 0, 391, 392, 5, 41, 0, 0, 392, 394, 3, 68, 34, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 55, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 403, 3, 62, 31, 0, 399, 400, 5, 41, 0, 0, 400, 402, 3, 62, 31, 0, 401, 399, 1, 0, 0, 0, 402, 405, 1, 0, 0, 0, 403, 401, 1, 0, 0, 0, 403, 404, 1, 0, 0, 0, 404, 57, 1, 0, 0, 0, 405, 403, 1, 0, 0, 0, 406, 411, 3, 56, 28, 0, 407, 408, 5, 39, 0, 0, 408, 410, 3, 56, 28, 0, 409, 407, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 411, 412, 1, 0, 0, 0, 412, 59, 1, 0, 0, 0, 413, 411, 1, 0, 0, 0, 414, 415, 7, 3, 0, 0, 415, 61, 1, 0, 0, 0, 416, 420, 5, 85, 0, 0, 417, 418, 4, 31, 10, 0, 418, 420, 3, 66, 33, 0, 419, 416, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 420, 63, 1, 0, 0, 0, 421, 464, 5, 50, 0, 0, 422, 423, 3, 100, 50, 0, 423, 424, 5, 72, 0, 0, 424, 464, 1, 0, 0, 0, 
425, 464, 3, 98, 49, 0, 426, 464, 3, 100, 50, 0, 427, 464, 3, 94, 47, 0, 428, 464, 3, 66, 33, 0, 429, 464, 3, 102, 51, 0, 430, 431, 5, 70, 0, 0, 431, 436, 3, 96, 48, 0, 432, 433, 5, 39, 0, 0, 433, 435, 3, 96, 48, 0, 434, 432, 1, 0, 0, 0, 435, 438, 1, 0, 0, 0, 436, 434, 1, 0, 0, 0, 436, 437, 1, 0, 0, 0, 437, 439, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 439, 440, 5, 71, 0, 0, 440, 464, 1, 0, 0, 0, 441, 442, 5, 70, 0, 0, 442, 447, 3, 94, 47, 0, 443, 444, 5, 39, 0, 0, 444, 446, 3, 94, 47, 0, 445, 443, 1, 0, 0, 0, 446, 449, 1, 0, 0, 0, 447, 445, 1, 0, 0, 0, 447, 448, 1, 0, 0, 0, 448, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 451, 5, 71, 0, 0, 451, 464, 1, 0, 0, 0, 452, 453, 5, 70, 0, 0, 453, 458, 3, 102, 51, 0, 454, 455, 5, 39, 0, 0, 455, 457, 3, 102, 51, 0, 456, 454, 1, 0, 0, 0, 457, 460, 1, 0, 0, 0, 458, 456, 1, 0, 0, 0, 458, 459, 1, 0, 0, 0, 459, 461, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 461, 462, 5, 71, 0, 0, 462, 464, 1, 0, 0, 0, 463, 421, 1, 0, 0, 0, 463, 422, 1, 0, 0, 0, 463, 425, 1, 0, 0, 0, 463, 426, 1, 0, 0, 0, 463, 427, 1, 0, 0, 0, 463, 428, 1, 0, 0, 0, 463, 429, 1, 0, 0, 0, 463, 430, 1, 0, 0, 0, 463, 441, 1, 0, 0, 0, 463, 452, 1, 0, 0, 0, 464, 65, 1, 0, 0, 0, 465, 468, 5, 53, 0, 0, 466, 468, 5, 69, 0, 0, 467, 465, 1, 0, 0, 0, 467, 466, 1, 0, 0, 0, 468, 67, 1, 0, 0, 0, 469, 473, 3, 60, 30, 0, 470, 471, 4, 34, 11, 0, 471, 473, 3, 66, 33, 0, 472, 469, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 473, 69, 1, 0, 0, 0, 474, 475, 5, 9, 0, 0, 475, 476, 5, 31, 0, 0, 476, 71, 1, 0, 0, 0, 477, 478, 5, 14, 0, 0, 478, 483, 3, 74, 37, 0, 479, 480, 5, 39, 0, 0, 480, 482, 3, 74, 37, 0, 481, 479, 1, 0, 0, 0, 482, 485, 1, 0, 0, 0, 483, 481, 1, 0, 0, 0, 483, 484, 1, 0, 0, 0, 484, 73, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 486, 488, 3, 10, 5, 0, 487, 489, 7, 4, 0, 0, 488, 487, 1, 0, 0, 0, 488, 489, 1, 0, 0, 0, 489, 492, 1, 0, 0, 0, 490, 491, 5, 51, 0, 0, 491, 493, 7, 5, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 75, 1, 0, 0, 0, 494, 495, 5, 8, 0, 0, 495, 496, 3, 58, 29, 0, 496, 77, 1, 0, 
0, 0, 497, 498, 5, 2, 0, 0, 498, 499, 3, 58, 29, 0, 499, 79, 1, 0, 0, 0, 500, 501, 5, 11, 0, 0, 501, 506, 3, 82, 41, 0, 502, 503, 5, 39, 0, 0, 503, 505, 3, 82, 41, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 81, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 510, 3, 56, 28, 0, 510, 511, 5, 89, 0, 0, 511, 512, 3, 56, 28, 0, 512, 83, 1, 0, 0, 0, 513, 514, 5, 1, 0, 0, 514, 515, 3, 20, 10, 0, 515, 517, 3, 102, 51, 0, 516, 518, 3, 90, 45, 0, 517, 516, 1, 0, 0, 0, 517, 518, 1, 0, 0, 0, 518, 85, 1, 0, 0, 0, 519, 520, 5, 7, 0, 0, 520, 521, 3, 20, 10, 0, 521, 522, 3, 102, 51, 0, 522, 87, 1, 0, 0, 0, 523, 524, 5, 10, 0, 0, 524, 525, 3, 54, 27, 0, 525, 89, 1, 0, 0, 0, 526, 531, 3, 92, 46, 0, 527, 528, 5, 39, 0, 0, 528, 530, 3, 92, 46, 0, 529, 527, 1, 0, 0, 0, 530, 533, 1, 0, 0, 0, 531, 529, 1, 0, 0, 0, 531, 532, 1, 0, 0, 0, 532, 91, 1, 0, 0, 0, 533, 531, 1, 0, 0, 0, 534, 535, 3, 60, 30, 0, 535, 536, 5, 36, 0, 0, 536, 537, 3, 64, 32, 0, 537, 93, 1, 0, 0, 0, 538, 539, 7, 6, 0, 0, 539, 95, 1, 0, 0, 0, 540, 543, 3, 98, 49, 0, 541, 543, 3, 100, 50, 0, 542, 540, 1, 0, 0, 0, 542, 541, 1, 0, 0, 0, 543, 97, 1, 0, 0, 0, 544, 546, 7, 0, 0, 0, 545, 544, 1, 0, 0, 0, 545, 546, 1, 0, 0, 0, 546, 547, 1, 0, 0, 0, 547, 548, 5, 32, 0, 0, 548, 99, 1, 0, 0, 0, 549, 551, 7, 0, 0, 0, 550, 549, 1, 0, 0, 0, 550, 551, 1, 0, 0, 0, 551, 552, 1, 0, 0, 0, 552, 553, 5, 31, 0, 0, 553, 101, 1, 0, 0, 0, 554, 555, 5, 30, 0, 0, 555, 103, 1, 0, 0, 0, 556, 557, 7, 7, 0, 0, 557, 105, 1, 0, 0, 0, 558, 559, 5, 5, 0, 0, 559, 560, 3, 108, 54, 0, 560, 107, 1, 0, 0, 0, 561, 562, 5, 70, 0, 0, 562, 563, 3, 2, 1, 0, 563, 564, 5, 71, 0, 0, 564, 109, 1, 0, 0, 0, 565, 566, 5, 13, 0, 0, 566, 567, 5, 105, 0, 0, 567, 111, 1, 0, 0, 0, 568, 569, 5, 3, 0, 0, 569, 572, 5, 95, 0, 0, 570, 571, 5, 93, 0, 0, 571, 573, 3, 56, 28, 0, 572, 570, 1, 0, 0, 0, 572, 573, 1, 0, 0, 0, 573, 583, 1, 0, 0, 0, 574, 575, 5, 94, 0, 0, 575, 580, 3, 114, 57, 0, 576, 577, 5, 39, 0, 0, 577, 579, 3, 114, 57, 0, 
578, 576, 1, 0, 0, 0, 579, 582, 1, 0, 0, 0, 580, 578, 1, 0, 0, 0, 580, 581, 1, 0, 0, 0, 581, 584, 1, 0, 0, 0, 582, 580, 1, 0, 0, 0, 583, 574, 1, 0, 0, 0, 583, 584, 1, 0, 0, 0, 584, 113, 1, 0, 0, 0, 585, 586, 3, 56, 28, 0, 586, 587, 5, 36, 0, 0, 587, 589, 1, 0, 0, 0, 588, 585, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 591, 3, 56, 28, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 18, 0, 0, 593, 594, 3, 36, 18, 0, 594, 595, 5, 93, 0, 0, 595, 596, 3, 58, 29, 0, 596, 117, 1, 0, 0, 0, 597, 598, 5, 17, 0, 0, 598, 601, 3, 50, 25, 0, 599, 600, 5, 33, 0, 0, 600, 602, 3, 30, 15, 0, 601, 599, 1, 0, 0, 0, 601, 602, 1, 0, 0, 0, 602, 119, 1, 0, 0, 0, 603, 605, 7, 8, 0, 0, 604, 603, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 606, 1, 0, 0, 0, 606, 607, 5, 20, 0, 0, 607, 608, 3, 122, 61, 0, 608, 609, 3, 124, 62, 0, 609, 121, 1, 0, 0, 0, 610, 613, 3, 60, 30, 0, 611, 612, 5, 89, 0, 0, 612, 614, 3, 60, 30, 0, 613, 611, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 123, 1, 0, 0, 0, 615, 616, 5, 93, 0, 0, 616, 621, 3, 126, 63, 0, 617, 618, 5, 39, 0, 0, 618, 620, 3, 126, 63, 0, 619, 617, 1, 0, 0, 0, 620, 623, 1, 0, 0, 0, 621, 619, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 125, 1, 0, 0, 0, 623, 621, 1, 0, 0, 0, 624, 625, 3, 16, 8, 0, 625, 127, 1, 0, 0, 0, 61, 139, 148, 168, 180, 189, 197, 202, 210, 212, 217, 224, 229, 234, 244, 250, 258, 260, 271, 278, 289, 292, 308, 314, 324, 328, 333, 347, 356, 360, 364, 371, 375, 382, 388, 395, 403, 411, 419, 436, 447, 458, 463, 467, 472, 483, 488, 492, 506, 517, 531, 542, 545, 550, 572, 580, 583, 588, 601, 604, 613, 621] \ No newline at end of file +[4, 1, 130, 651, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 
29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 142, 8, 1, 10, 1, 12, 1, 145, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 153, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 173, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 185, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 192, 8, 5, 10, 5, 12, 5, 195, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 202, 8, 5, 1, 5, 1, 5, 1, 5, 3, 5, 207, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 215, 8, 5, 10, 5, 12, 5, 218, 9, 5, 1, 6, 1, 6, 3, 6, 222, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 229, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 234, 8, 6, 1, 7, 1, 7, 1, 7, 3, 7, 239, 8, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 249, 8, 8, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 255, 8, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 5, 9, 263, 8, 9, 10, 9, 12, 9, 266, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 3, 10, 276, 8, 10, 1, 10, 1, 10, 1, 10, 5, 10, 281, 8, 10, 10, 10, 12, 10, 284, 9, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 5, 11, 292, 8, 11, 10, 11, 12, 11, 295, 9, 11, 1, 11, 1, 11, 3, 11, 299, 8, 11, 3, 11, 301, 8, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 5, 13, 312, 8, 13, 10, 13, 12, 13, 315, 9, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 5, 17, 331, 8, 17, 10, 17, 12, 17, 334, 9, 17, 1, 18, 1, 18, 1, 18, 3, 18, 339, 8, 18, 
1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 347, 8, 19, 10, 19, 12, 19, 350, 9, 19, 1, 19, 3, 19, 353, 8, 19, 1, 20, 1, 20, 1, 20, 3, 20, 358, 8, 20, 1, 20, 1, 20, 1, 21, 1, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 370, 8, 23, 10, 23, 12, 23, 373, 9, 23, 1, 24, 1, 24, 1, 24, 1, 24, 5, 24, 379, 8, 24, 10, 24, 12, 24, 382, 9, 24, 1, 24, 3, 24, 385, 8, 24, 1, 24, 1, 24, 3, 24, 389, 8, 24, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 3, 26, 396, 8, 26, 1, 26, 1, 26, 3, 26, 400, 8, 26, 1, 27, 1, 27, 1, 27, 5, 27, 405, 8, 27, 10, 27, 12, 27, 408, 9, 27, 1, 28, 1, 28, 1, 28, 3, 28, 413, 8, 28, 1, 29, 1, 29, 1, 29, 5, 29, 418, 8, 29, 10, 29, 12, 29, 421, 9, 29, 1, 30, 1, 30, 1, 30, 5, 30, 426, 8, 30, 10, 30, 12, 30, 429, 9, 30, 1, 31, 1, 31, 1, 31, 5, 31, 434, 8, 31, 10, 31, 12, 31, 437, 9, 31, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 3, 33, 444, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 459, 8, 34, 10, 34, 12, 34, 462, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 470, 8, 34, 10, 34, 12, 34, 473, 9, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 1, 34, 5, 34, 481, 8, 34, 10, 34, 12, 34, 484, 9, 34, 1, 34, 1, 34, 3, 34, 488, 8, 34, 1, 35, 1, 35, 3, 35, 492, 8, 35, 1, 36, 1, 36, 1, 36, 3, 36, 497, 8, 36, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 506, 8, 38, 10, 38, 12, 38, 509, 9, 38, 1, 39, 1, 39, 3, 39, 513, 8, 39, 1, 39, 1, 39, 3, 39, 517, 8, 39, 1, 40, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 5, 42, 529, 8, 42, 10, 42, 12, 42, 532, 9, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 542, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 5, 47, 554, 8, 47, 10, 47, 12, 47, 557, 9, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 50, 1, 50, 3, 50, 567, 8, 50, 1, 51, 3, 51, 570, 8, 51, 1, 51, 1, 51, 1, 52, 3, 52, 575, 8, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 
1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 3, 58, 597, 8, 58, 1, 58, 1, 58, 1, 58, 1, 58, 5, 58, 603, 8, 58, 10, 58, 12, 58, 606, 9, 58, 3, 58, 608, 8, 58, 1, 59, 1, 59, 1, 59, 3, 59, 613, 8, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 61, 3, 61, 626, 8, 61, 1, 62, 3, 62, 629, 8, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 3, 63, 638, 8, 63, 1, 64, 1, 64, 1, 64, 1, 64, 5, 64, 644, 8, 64, 10, 64, 12, 64, 647, 9, 64, 1, 65, 1, 65, 1, 65, 0, 4, 2, 10, 18, 20, 66, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 98, 100, 102, 104, 106, 108, 110, 112, 114, 116, 118, 120, 122, 124, 126, 128, 130, 0, 9, 1, 0, 64, 65, 1, 0, 66, 68, 2, 0, 30, 30, 83, 83, 1, 0, 74, 75, 2, 0, 35, 35, 40, 40, 2, 0, 43, 43, 46, 46, 2, 0, 42, 42, 56, 56, 2, 0, 57, 57, 59, 63, 1, 0, 22, 24, 678, 0, 132, 1, 0, 0, 0, 2, 135, 1, 0, 0, 0, 4, 152, 1, 0, 0, 0, 6, 172, 1, 0, 0, 0, 8, 174, 1, 0, 0, 0, 10, 206, 1, 0, 0, 0, 12, 233, 1, 0, 0, 0, 14, 235, 1, 0, 0, 0, 16, 248, 1, 0, 0, 0, 18, 254, 1, 0, 0, 0, 20, 275, 1, 0, 0, 0, 22, 285, 1, 0, 0, 0, 24, 304, 1, 0, 0, 0, 26, 306, 1, 0, 0, 0, 28, 318, 1, 0, 0, 0, 30, 322, 1, 0, 0, 0, 32, 324, 1, 0, 0, 0, 34, 327, 1, 0, 0, 0, 36, 338, 1, 0, 0, 0, 38, 342, 1, 0, 0, 0, 40, 357, 1, 0, 0, 0, 42, 361, 1, 0, 0, 0, 44, 363, 1, 0, 0, 0, 46, 365, 1, 0, 0, 0, 48, 374, 1, 0, 0, 0, 50, 390, 1, 0, 0, 0, 52, 393, 1, 0, 0, 0, 54, 401, 1, 0, 0, 0, 56, 409, 1, 0, 0, 0, 58, 414, 1, 0, 0, 0, 60, 422, 1, 0, 0, 0, 62, 430, 1, 0, 0, 0, 64, 438, 1, 0, 0, 0, 66, 443, 1, 0, 0, 0, 68, 487, 1, 0, 0, 0, 70, 491, 1, 0, 0, 0, 72, 496, 1, 0, 0, 0, 74, 498, 1, 0, 0, 0, 76, 501, 1, 0, 0, 0, 78, 510, 1, 0, 0, 0, 80, 518, 1, 0, 0, 0, 82, 521, 1, 0, 0, 0, 84, 524, 1, 0, 0, 0, 86, 533, 1, 0, 0, 0, 88, 537, 1, 0, 0, 0, 90, 543, 1, 0, 0, 0, 92, 547, 1, 0, 0, 0, 94, 550, 1, 0, 0, 0, 96, 558, 1, 0, 0, 0, 98, 562, 1, 0, 0, 0, 
100, 566, 1, 0, 0, 0, 102, 569, 1, 0, 0, 0, 104, 574, 1, 0, 0, 0, 106, 578, 1, 0, 0, 0, 108, 580, 1, 0, 0, 0, 110, 582, 1, 0, 0, 0, 112, 585, 1, 0, 0, 0, 114, 589, 1, 0, 0, 0, 116, 592, 1, 0, 0, 0, 118, 612, 1, 0, 0, 0, 120, 616, 1, 0, 0, 0, 122, 621, 1, 0, 0, 0, 124, 628, 1, 0, 0, 0, 126, 634, 1, 0, 0, 0, 128, 639, 1, 0, 0, 0, 130, 648, 1, 0, 0, 0, 132, 133, 3, 2, 1, 0, 133, 134, 5, 0, 0, 1, 134, 1, 1, 0, 0, 0, 135, 136, 6, 1, -1, 0, 136, 137, 3, 4, 2, 0, 137, 143, 1, 0, 0, 0, 138, 139, 10, 1, 0, 0, 139, 140, 5, 29, 0, 0, 140, 142, 3, 6, 3, 0, 141, 138, 1, 0, 0, 0, 142, 145, 1, 0, 0, 0, 143, 141, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 3, 1, 0, 0, 0, 145, 143, 1, 0, 0, 0, 146, 153, 3, 110, 55, 0, 147, 153, 3, 38, 19, 0, 148, 153, 3, 32, 16, 0, 149, 153, 3, 114, 57, 0, 150, 151, 4, 2, 1, 0, 151, 153, 3, 48, 24, 0, 152, 146, 1, 0, 0, 0, 152, 147, 1, 0, 0, 0, 152, 148, 1, 0, 0, 0, 152, 149, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 153, 5, 1, 0, 0, 0, 154, 173, 3, 50, 25, 0, 155, 173, 3, 8, 4, 0, 156, 173, 3, 80, 40, 0, 157, 173, 3, 74, 37, 0, 158, 173, 3, 52, 26, 0, 159, 173, 3, 76, 38, 0, 160, 173, 3, 82, 41, 0, 161, 173, 3, 84, 42, 0, 162, 173, 3, 88, 44, 0, 163, 173, 3, 90, 45, 0, 164, 173, 3, 116, 58, 0, 165, 173, 3, 92, 46, 0, 166, 167, 4, 3, 2, 0, 167, 173, 3, 122, 61, 0, 168, 169, 4, 3, 3, 0, 169, 173, 3, 120, 60, 0, 170, 171, 4, 3, 4, 0, 171, 173, 3, 124, 62, 0, 172, 154, 1, 0, 0, 0, 172, 155, 1, 0, 0, 0, 172, 156, 1, 0, 0, 0, 172, 157, 1, 0, 0, 0, 172, 158, 1, 0, 0, 0, 172, 159, 1, 0, 0, 0, 172, 160, 1, 0, 0, 0, 172, 161, 1, 0, 0, 0, 172, 162, 1, 0, 0, 0, 172, 163, 1, 0, 0, 0, 172, 164, 1, 0, 0, 0, 172, 165, 1, 0, 0, 0, 172, 166, 1, 0, 0, 0, 172, 168, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 7, 1, 0, 0, 0, 174, 175, 5, 16, 0, 0, 175, 176, 3, 10, 5, 0, 176, 9, 1, 0, 0, 0, 177, 178, 6, 5, -1, 0, 178, 179, 5, 49, 0, 0, 179, 207, 3, 10, 5, 8, 180, 207, 3, 16, 8, 0, 181, 207, 3, 12, 6, 0, 182, 184, 3, 16, 8, 0, 183, 185, 5, 49, 0, 0, 184, 183, 1, 0, 0, 0, 184, 185, 1, 0, 
0, 0, 185, 186, 1, 0, 0, 0, 186, 187, 5, 44, 0, 0, 187, 188, 5, 48, 0, 0, 188, 193, 3, 16, 8, 0, 189, 190, 5, 39, 0, 0, 190, 192, 3, 16, 8, 0, 191, 189, 1, 0, 0, 0, 192, 195, 1, 0, 0, 0, 193, 191, 1, 0, 0, 0, 193, 194, 1, 0, 0, 0, 194, 196, 1, 0, 0, 0, 195, 193, 1, 0, 0, 0, 196, 197, 5, 55, 0, 0, 197, 207, 1, 0, 0, 0, 198, 199, 3, 16, 8, 0, 199, 201, 5, 45, 0, 0, 200, 202, 5, 49, 0, 0, 201, 200, 1, 0, 0, 0, 201, 202, 1, 0, 0, 0, 202, 203, 1, 0, 0, 0, 203, 204, 5, 50, 0, 0, 204, 207, 1, 0, 0, 0, 205, 207, 3, 14, 7, 0, 206, 177, 1, 0, 0, 0, 206, 180, 1, 0, 0, 0, 206, 181, 1, 0, 0, 0, 206, 182, 1, 0, 0, 0, 206, 198, 1, 0, 0, 0, 206, 205, 1, 0, 0, 0, 207, 216, 1, 0, 0, 0, 208, 209, 10, 5, 0, 0, 209, 210, 5, 34, 0, 0, 210, 215, 3, 10, 5, 6, 211, 212, 10, 4, 0, 0, 212, 213, 5, 52, 0, 0, 213, 215, 3, 10, 5, 5, 214, 208, 1, 0, 0, 0, 214, 211, 1, 0, 0, 0, 215, 218, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 216, 217, 1, 0, 0, 0, 217, 11, 1, 0, 0, 0, 218, 216, 1, 0, 0, 0, 219, 221, 3, 16, 8, 0, 220, 222, 5, 49, 0, 0, 221, 220, 1, 0, 0, 0, 221, 222, 1, 0, 0, 0, 222, 223, 1, 0, 0, 0, 223, 224, 5, 47, 0, 0, 224, 225, 3, 106, 53, 0, 225, 234, 1, 0, 0, 0, 226, 228, 3, 16, 8, 0, 227, 229, 5, 49, 0, 0, 228, 227, 1, 0, 0, 0, 228, 229, 1, 0, 0, 0, 229, 230, 1, 0, 0, 0, 230, 231, 5, 54, 0, 0, 231, 232, 3, 106, 53, 0, 232, 234, 1, 0, 0, 0, 233, 219, 1, 0, 0, 0, 233, 226, 1, 0, 0, 0, 234, 13, 1, 0, 0, 0, 235, 238, 3, 58, 29, 0, 236, 237, 5, 37, 0, 0, 237, 239, 3, 30, 15, 0, 238, 236, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 1, 0, 0, 0, 240, 241, 5, 38, 0, 0, 241, 242, 3, 68, 34, 0, 242, 15, 1, 0, 0, 0, 243, 249, 3, 18, 9, 0, 244, 245, 3, 18, 9, 0, 245, 246, 3, 108, 54, 0, 246, 247, 3, 18, 9, 0, 247, 249, 1, 0, 0, 0, 248, 243, 1, 0, 0, 0, 248, 244, 1, 0, 0, 0, 249, 17, 1, 0, 0, 0, 250, 251, 6, 9, -1, 0, 251, 255, 3, 20, 10, 0, 252, 253, 7, 0, 0, 0, 253, 255, 3, 18, 9, 3, 254, 250, 1, 0, 0, 0, 254, 252, 1, 0, 0, 0, 255, 264, 1, 0, 0, 0, 256, 257, 10, 2, 0, 0, 257, 258, 7, 1, 0, 0, 258, 263, 3, 
18, 9, 3, 259, 260, 10, 1, 0, 0, 260, 261, 7, 0, 0, 0, 261, 263, 3, 18, 9, 2, 262, 256, 1, 0, 0, 0, 262, 259, 1, 0, 0, 0, 263, 266, 1, 0, 0, 0, 264, 262, 1, 0, 0, 0, 264, 265, 1, 0, 0, 0, 265, 19, 1, 0, 0, 0, 266, 264, 1, 0, 0, 0, 267, 268, 6, 10, -1, 0, 268, 276, 3, 68, 34, 0, 269, 276, 3, 58, 29, 0, 270, 276, 3, 22, 11, 0, 271, 272, 5, 48, 0, 0, 272, 273, 3, 10, 5, 0, 273, 274, 5, 55, 0, 0, 274, 276, 1, 0, 0, 0, 275, 267, 1, 0, 0, 0, 275, 269, 1, 0, 0, 0, 275, 270, 1, 0, 0, 0, 275, 271, 1, 0, 0, 0, 276, 282, 1, 0, 0, 0, 277, 278, 10, 1, 0, 0, 278, 279, 5, 37, 0, 0, 279, 281, 3, 30, 15, 0, 280, 277, 1, 0, 0, 0, 281, 284, 1, 0, 0, 0, 282, 280, 1, 0, 0, 0, 282, 283, 1, 0, 0, 0, 283, 21, 1, 0, 0, 0, 284, 282, 1, 0, 0, 0, 285, 286, 3, 24, 12, 0, 286, 300, 5, 48, 0, 0, 287, 301, 5, 66, 0, 0, 288, 293, 3, 10, 5, 0, 289, 290, 5, 39, 0, 0, 290, 292, 3, 10, 5, 0, 291, 289, 1, 0, 0, 0, 292, 295, 1, 0, 0, 0, 293, 291, 1, 0, 0, 0, 293, 294, 1, 0, 0, 0, 294, 298, 1, 0, 0, 0, 295, 293, 1, 0, 0, 0, 296, 297, 5, 39, 0, 0, 297, 299, 3, 26, 13, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 301, 1, 0, 0, 0, 300, 287, 1, 0, 0, 0, 300, 288, 1, 0, 0, 0, 300, 301, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 303, 5, 55, 0, 0, 303, 23, 1, 0, 0, 0, 304, 305, 3, 72, 36, 0, 305, 25, 1, 0, 0, 0, 306, 307, 4, 13, 10, 0, 307, 308, 5, 69, 0, 0, 308, 313, 3, 28, 14, 0, 309, 310, 5, 39, 0, 0, 310, 312, 3, 28, 14, 0, 311, 309, 1, 0, 0, 0, 312, 315, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 313, 314, 1, 0, 0, 0, 314, 316, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 316, 317, 5, 70, 0, 0, 317, 27, 1, 0, 0, 0, 318, 319, 3, 106, 53, 0, 319, 320, 5, 38, 0, 0, 320, 321, 3, 68, 34, 0, 321, 29, 1, 0, 0, 0, 322, 323, 3, 64, 32, 0, 323, 31, 1, 0, 0, 0, 324, 325, 5, 12, 0, 0, 325, 326, 3, 34, 17, 0, 326, 33, 1, 0, 0, 0, 327, 332, 3, 36, 18, 0, 328, 329, 5, 39, 0, 0, 329, 331, 3, 36, 18, 0, 330, 328, 1, 0, 0, 0, 331, 334, 1, 0, 0, 0, 332, 330, 1, 0, 0, 0, 332, 333, 1, 0, 0, 0, 333, 35, 1, 0, 0, 0, 334, 332, 1, 0, 0, 0, 335, 
336, 3, 58, 29, 0, 336, 337, 5, 36, 0, 0, 337, 339, 1, 0, 0, 0, 338, 335, 1, 0, 0, 0, 338, 339, 1, 0, 0, 0, 339, 340, 1, 0, 0, 0, 340, 341, 3, 10, 5, 0, 341, 37, 1, 0, 0, 0, 342, 343, 5, 6, 0, 0, 343, 348, 3, 40, 20, 0, 344, 345, 5, 39, 0, 0, 345, 347, 3, 40, 20, 0, 346, 344, 1, 0, 0, 0, 347, 350, 1, 0, 0, 0, 348, 346, 1, 0, 0, 0, 348, 349, 1, 0, 0, 0, 349, 352, 1, 0, 0, 0, 350, 348, 1, 0, 0, 0, 351, 353, 3, 46, 23, 0, 352, 351, 1, 0, 0, 0, 352, 353, 1, 0, 0, 0, 353, 39, 1, 0, 0, 0, 354, 355, 3, 42, 21, 0, 355, 356, 5, 38, 0, 0, 356, 358, 1, 0, 0, 0, 357, 354, 1, 0, 0, 0, 357, 358, 1, 0, 0, 0, 358, 359, 1, 0, 0, 0, 359, 360, 3, 44, 22, 0, 360, 41, 1, 0, 0, 0, 361, 362, 5, 83, 0, 0, 362, 43, 1, 0, 0, 0, 363, 364, 7, 2, 0, 0, 364, 45, 1, 0, 0, 0, 365, 366, 5, 82, 0, 0, 366, 371, 5, 83, 0, 0, 367, 368, 5, 39, 0, 0, 368, 370, 5, 83, 0, 0, 369, 367, 1, 0, 0, 0, 370, 373, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 371, 372, 1, 0, 0, 0, 372, 47, 1, 0, 0, 0, 373, 371, 1, 0, 0, 0, 374, 375, 5, 19, 0, 0, 375, 380, 3, 40, 20, 0, 376, 377, 5, 39, 0, 0, 377, 379, 3, 40, 20, 0, 378, 376, 1, 0, 0, 0, 379, 382, 1, 0, 0, 0, 380, 378, 1, 0, 0, 0, 380, 381, 1, 0, 0, 0, 381, 384, 1, 0, 0, 0, 382, 380, 1, 0, 0, 0, 383, 385, 3, 54, 27, 0, 384, 383, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 387, 5, 33, 0, 0, 387, 389, 3, 34, 17, 0, 388, 386, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 49, 1, 0, 0, 0, 390, 391, 5, 4, 0, 0, 391, 392, 3, 34, 17, 0, 392, 51, 1, 0, 0, 0, 393, 395, 5, 15, 0, 0, 394, 396, 3, 54, 27, 0, 395, 394, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 398, 5, 33, 0, 0, 398, 400, 3, 34, 17, 0, 399, 397, 1, 0, 0, 0, 399, 400, 1, 0, 0, 0, 400, 53, 1, 0, 0, 0, 401, 406, 3, 56, 28, 0, 402, 403, 5, 39, 0, 0, 403, 405, 3, 56, 28, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 55, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 412, 3, 36, 18, 0, 410, 411, 5, 16, 0, 0, 411, 413, 3, 10, 5, 0, 412, 410, 1, 0, 0, 0, 412, 
413, 1, 0, 0, 0, 413, 57, 1, 0, 0, 0, 414, 419, 3, 72, 36, 0, 415, 416, 5, 41, 0, 0, 416, 418, 3, 72, 36, 0, 417, 415, 1, 0, 0, 0, 418, 421, 1, 0, 0, 0, 419, 417, 1, 0, 0, 0, 419, 420, 1, 0, 0, 0, 420, 59, 1, 0, 0, 0, 421, 419, 1, 0, 0, 0, 422, 427, 3, 66, 33, 0, 423, 424, 5, 41, 0, 0, 424, 426, 3, 66, 33, 0, 425, 423, 1, 0, 0, 0, 426, 429, 1, 0, 0, 0, 427, 425, 1, 0, 0, 0, 427, 428, 1, 0, 0, 0, 428, 61, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 430, 435, 3, 60, 30, 0, 431, 432, 5, 39, 0, 0, 432, 434, 3, 60, 30, 0, 433, 431, 1, 0, 0, 0, 434, 437, 1, 0, 0, 0, 435, 433, 1, 0, 0, 0, 435, 436, 1, 0, 0, 0, 436, 63, 1, 0, 0, 0, 437, 435, 1, 0, 0, 0, 438, 439, 7, 3, 0, 0, 439, 65, 1, 0, 0, 0, 440, 444, 5, 87, 0, 0, 441, 442, 4, 33, 11, 0, 442, 444, 3, 70, 35, 0, 443, 440, 1, 0, 0, 0, 443, 441, 1, 0, 0, 0, 444, 67, 1, 0, 0, 0, 445, 488, 5, 50, 0, 0, 446, 447, 3, 104, 52, 0, 447, 448, 5, 74, 0, 0, 448, 488, 1, 0, 0, 0, 449, 488, 3, 102, 51, 0, 450, 488, 3, 104, 52, 0, 451, 488, 3, 98, 49, 0, 452, 488, 3, 70, 35, 0, 453, 488, 3, 106, 53, 0, 454, 455, 5, 72, 0, 0, 455, 460, 3, 100, 50, 0, 456, 457, 5, 39, 0, 0, 457, 459, 3, 100, 50, 0, 458, 456, 1, 0, 0, 0, 459, 462, 1, 0, 0, 0, 460, 458, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 463, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 463, 464, 5, 73, 0, 0, 464, 488, 1, 0, 0, 0, 465, 466, 5, 72, 0, 0, 466, 471, 3, 98, 49, 0, 467, 468, 5, 39, 0, 0, 468, 470, 3, 98, 49, 0, 469, 467, 1, 0, 0, 0, 470, 473, 1, 0, 0, 0, 471, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 474, 1, 0, 0, 0, 473, 471, 1, 0, 0, 0, 474, 475, 5, 73, 0, 0, 475, 488, 1, 0, 0, 0, 476, 477, 5, 72, 0, 0, 477, 482, 3, 106, 53, 0, 478, 479, 5, 39, 0, 0, 479, 481, 3, 106, 53, 0, 480, 478, 1, 0, 0, 0, 481, 484, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 482, 483, 1, 0, 0, 0, 483, 485, 1, 0, 0, 0, 484, 482, 1, 0, 0, 0, 485, 486, 5, 73, 0, 0, 486, 488, 1, 0, 0, 0, 487, 445, 1, 0, 0, 0, 487, 446, 1, 0, 0, 0, 487, 449, 1, 0, 0, 0, 487, 450, 1, 0, 0, 0, 487, 451, 1, 0, 0, 0, 487, 452, 1, 0, 0, 0, 487, 453, 
1, 0, 0, 0, 487, 454, 1, 0, 0, 0, 487, 465, 1, 0, 0, 0, 487, 476, 1, 0, 0, 0, 488, 69, 1, 0, 0, 0, 489, 492, 5, 53, 0, 0, 490, 492, 5, 71, 0, 0, 491, 489, 1, 0, 0, 0, 491, 490, 1, 0, 0, 0, 492, 71, 1, 0, 0, 0, 493, 497, 3, 64, 32, 0, 494, 495, 4, 36, 12, 0, 495, 497, 3, 70, 35, 0, 496, 493, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 497, 73, 1, 0, 0, 0, 498, 499, 5, 9, 0, 0, 499, 500, 5, 31, 0, 0, 500, 75, 1, 0, 0, 0, 501, 502, 5, 14, 0, 0, 502, 507, 3, 78, 39, 0, 503, 504, 5, 39, 0, 0, 504, 506, 3, 78, 39, 0, 505, 503, 1, 0, 0, 0, 506, 509, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 77, 1, 0, 0, 0, 509, 507, 1, 0, 0, 0, 510, 512, 3, 10, 5, 0, 511, 513, 7, 4, 0, 0, 512, 511, 1, 0, 0, 0, 512, 513, 1, 0, 0, 0, 513, 516, 1, 0, 0, 0, 514, 515, 5, 51, 0, 0, 515, 517, 7, 5, 0, 0, 516, 514, 1, 0, 0, 0, 516, 517, 1, 0, 0, 0, 517, 79, 1, 0, 0, 0, 518, 519, 5, 8, 0, 0, 519, 520, 3, 62, 31, 0, 520, 81, 1, 0, 0, 0, 521, 522, 5, 2, 0, 0, 522, 523, 3, 62, 31, 0, 523, 83, 1, 0, 0, 0, 524, 525, 5, 11, 0, 0, 525, 530, 3, 86, 43, 0, 526, 527, 5, 39, 0, 0, 527, 529, 3, 86, 43, 0, 528, 526, 1, 0, 0, 0, 529, 532, 1, 0, 0, 0, 530, 528, 1, 0, 0, 0, 530, 531, 1, 0, 0, 0, 531, 85, 1, 0, 0, 0, 532, 530, 1, 0, 0, 0, 533, 534, 3, 60, 30, 0, 534, 535, 5, 91, 0, 0, 535, 536, 3, 60, 30, 0, 536, 87, 1, 0, 0, 0, 537, 538, 5, 1, 0, 0, 538, 539, 3, 20, 10, 0, 539, 541, 3, 106, 53, 0, 540, 542, 3, 94, 47, 0, 541, 540, 1, 0, 0, 0, 541, 542, 1, 0, 0, 0, 542, 89, 1, 0, 0, 0, 543, 544, 5, 7, 0, 0, 544, 545, 3, 20, 10, 0, 545, 546, 3, 106, 53, 0, 546, 91, 1, 0, 0, 0, 547, 548, 5, 10, 0, 0, 548, 549, 3, 58, 29, 0, 549, 93, 1, 0, 0, 0, 550, 555, 3, 96, 48, 0, 551, 552, 5, 39, 0, 0, 552, 554, 3, 96, 48, 0, 553, 551, 1, 0, 0, 0, 554, 557, 1, 0, 0, 0, 555, 553, 1, 0, 0, 0, 555, 556, 1, 0, 0, 0, 556, 95, 1, 0, 0, 0, 557, 555, 1, 0, 0, 0, 558, 559, 3, 64, 32, 0, 559, 560, 5, 36, 0, 0, 560, 561, 3, 68, 34, 0, 561, 97, 1, 0, 0, 0, 562, 563, 7, 6, 0, 0, 563, 99, 1, 0, 0, 0, 564, 567, 3, 102, 51, 0, 565, 
567, 3, 104, 52, 0, 566, 564, 1, 0, 0, 0, 566, 565, 1, 0, 0, 0, 567, 101, 1, 0, 0, 0, 568, 570, 7, 0, 0, 0, 569, 568, 1, 0, 0, 0, 569, 570, 1, 0, 0, 0, 570, 571, 1, 0, 0, 0, 571, 572, 5, 32, 0, 0, 572, 103, 1, 0, 0, 0, 573, 575, 7, 0, 0, 0, 574, 573, 1, 0, 0, 0, 574, 575, 1, 0, 0, 0, 575, 576, 1, 0, 0, 0, 576, 577, 5, 31, 0, 0, 577, 105, 1, 0, 0, 0, 578, 579, 5, 30, 0, 0, 579, 107, 1, 0, 0, 0, 580, 581, 7, 7, 0, 0, 581, 109, 1, 0, 0, 0, 582, 583, 5, 5, 0, 0, 583, 584, 3, 112, 56, 0, 584, 111, 1, 0, 0, 0, 585, 586, 5, 72, 0, 0, 586, 587, 3, 2, 1, 0, 587, 588, 5, 73, 0, 0, 588, 113, 1, 0, 0, 0, 589, 590, 5, 13, 0, 0, 590, 591, 5, 107, 0, 0, 591, 115, 1, 0, 0, 0, 592, 593, 5, 3, 0, 0, 593, 596, 5, 97, 0, 0, 594, 595, 5, 95, 0, 0, 595, 597, 3, 60, 30, 0, 596, 594, 1, 0, 0, 0, 596, 597, 1, 0, 0, 0, 597, 607, 1, 0, 0, 0, 598, 599, 5, 96, 0, 0, 599, 604, 3, 118, 59, 0, 600, 601, 5, 39, 0, 0, 601, 603, 3, 118, 59, 0, 602, 600, 1, 0, 0, 0, 603, 606, 1, 0, 0, 0, 604, 602, 1, 0, 0, 0, 604, 605, 1, 0, 0, 0, 605, 608, 1, 0, 0, 0, 606, 604, 1, 0, 0, 0, 607, 598, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 117, 1, 0, 0, 0, 609, 610, 3, 60, 30, 0, 610, 611, 5, 36, 0, 0, 611, 613, 1, 0, 0, 0, 612, 609, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 614, 1, 0, 0, 0, 614, 615, 3, 60, 30, 0, 615, 119, 1, 0, 0, 0, 616, 617, 5, 18, 0, 0, 617, 618, 3, 40, 20, 0, 618, 619, 5, 95, 0, 0, 619, 620, 3, 62, 31, 0, 620, 121, 1, 0, 0, 0, 621, 622, 5, 17, 0, 0, 622, 625, 3, 54, 27, 0, 623, 624, 5, 33, 0, 0, 624, 626, 3, 34, 17, 0, 625, 623, 1, 0, 0, 0, 625, 626, 1, 0, 0, 0, 626, 123, 1, 0, 0, 0, 627, 629, 7, 8, 0, 0, 628, 627, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 5, 20, 0, 0, 631, 632, 3, 126, 63, 0, 632, 633, 3, 128, 64, 0, 633, 125, 1, 0, 0, 0, 634, 637, 3, 64, 32, 0, 635, 636, 5, 91, 0, 0, 636, 638, 3, 64, 32, 0, 637, 635, 1, 0, 0, 0, 637, 638, 1, 0, 0, 0, 638, 127, 1, 0, 0, 0, 639, 640, 5, 95, 0, 0, 640, 645, 3, 130, 65, 0, 641, 642, 5, 39, 0, 0, 642, 644, 3, 130, 65, 0, 643, 
641, 1, 0, 0, 0, 644, 647, 1, 0, 0, 0, 645, 643, 1, 0, 0, 0, 645, 646, 1, 0, 0, 0, 646, 129, 1, 0, 0, 0, 647, 645, 1, 0, 0, 0, 648, 649, 3, 16, 8, 0, 649, 131, 1, 0, 0, 0, 63, 143, 152, 172, 184, 193, 201, 206, 214, 216, 221, 228, 233, 238, 248, 254, 262, 264, 275, 282, 293, 298, 300, 313, 332, 338, 348, 352, 357, 371, 380, 384, 388, 395, 399, 406, 412, 419, 427, 435, 443, 460, 471, 482, 487, 491, 496, 507, 512, 516, 530, 541, 555, 566, 569, 574, 596, 604, 607, 612, 625, 628, 637, 645] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index e272dc7f477a4..beb14e1588472 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -35,58 +35,59 @@ public class EsqlBaseParser extends ParserConfig { FIRST=43, IN=44, IS=45, LAST=46, LIKE=47, LP=48, NOT=49, NULL=50, NULLS=51, OR=52, PARAM=53, RLIKE=54, RP=55, TRUE=56, EQ=57, CIEQ=58, NEQ=59, LT=60, LTE=61, GT=62, GTE=63, PLUS=64, MINUS=65, ASTERISK=66, SLASH=67, PERCENT=68, - NAMED_OR_POSITIONAL_PARAM=69, OPENING_BRACKET=70, CLOSING_BRACKET=71, - UNQUOTED_IDENTIFIER=72, QUOTED_IDENTIFIER=73, EXPR_LINE_COMMENT=74, EXPR_MULTILINE_COMMENT=75, - EXPR_WS=76, EXPLAIN_WS=77, EXPLAIN_LINE_COMMENT=78, EXPLAIN_MULTILINE_COMMENT=79, - METADATA=80, UNQUOTED_SOURCE=81, FROM_LINE_COMMENT=82, FROM_MULTILINE_COMMENT=83, - FROM_WS=84, ID_PATTERN=85, PROJECT_LINE_COMMENT=86, PROJECT_MULTILINE_COMMENT=87, - PROJECT_WS=88, AS=89, RENAME_LINE_COMMENT=90, RENAME_MULTILINE_COMMENT=91, - RENAME_WS=92, ON=93, WITH=94, ENRICH_POLICY_NAME=95, ENRICH_LINE_COMMENT=96, - ENRICH_MULTILINE_COMMENT=97, ENRICH_WS=98, ENRICH_FIELD_LINE_COMMENT=99, - ENRICH_FIELD_MULTILINE_COMMENT=100, ENRICH_FIELD_WS=101, MVEXPAND_LINE_COMMENT=102, - 
MVEXPAND_MULTILINE_COMMENT=103, MVEXPAND_WS=104, INFO=105, SHOW_LINE_COMMENT=106, - SHOW_MULTILINE_COMMENT=107, SHOW_WS=108, SETTING=109, SETTING_LINE_COMMENT=110, - SETTTING_MULTILINE_COMMENT=111, SETTING_WS=112, LOOKUP_LINE_COMMENT=113, - LOOKUP_MULTILINE_COMMENT=114, LOOKUP_WS=115, LOOKUP_FIELD_LINE_COMMENT=116, - LOOKUP_FIELD_MULTILINE_COMMENT=117, LOOKUP_FIELD_WS=118, USING=119, JOIN_LINE_COMMENT=120, - JOIN_MULTILINE_COMMENT=121, JOIN_WS=122, METRICS_LINE_COMMENT=123, METRICS_MULTILINE_COMMENT=124, - METRICS_WS=125, CLOSING_METRICS_LINE_COMMENT=126, CLOSING_METRICS_MULTILINE_COMMENT=127, - CLOSING_METRICS_WS=128; + LEFT_BRACES=69, RIGHT_BRACES=70, NAMED_OR_POSITIONAL_PARAM=71, OPENING_BRACKET=72, + CLOSING_BRACKET=73, UNQUOTED_IDENTIFIER=74, QUOTED_IDENTIFIER=75, EXPR_LINE_COMMENT=76, + EXPR_MULTILINE_COMMENT=77, EXPR_WS=78, EXPLAIN_WS=79, EXPLAIN_LINE_COMMENT=80, + EXPLAIN_MULTILINE_COMMENT=81, METADATA=82, UNQUOTED_SOURCE=83, FROM_LINE_COMMENT=84, + FROM_MULTILINE_COMMENT=85, FROM_WS=86, ID_PATTERN=87, PROJECT_LINE_COMMENT=88, + PROJECT_MULTILINE_COMMENT=89, PROJECT_WS=90, AS=91, RENAME_LINE_COMMENT=92, + RENAME_MULTILINE_COMMENT=93, RENAME_WS=94, ON=95, WITH=96, ENRICH_POLICY_NAME=97, + ENRICH_LINE_COMMENT=98, ENRICH_MULTILINE_COMMENT=99, ENRICH_WS=100, ENRICH_FIELD_LINE_COMMENT=101, + ENRICH_FIELD_MULTILINE_COMMENT=102, ENRICH_FIELD_WS=103, MVEXPAND_LINE_COMMENT=104, + MVEXPAND_MULTILINE_COMMENT=105, MVEXPAND_WS=106, INFO=107, SHOW_LINE_COMMENT=108, + SHOW_MULTILINE_COMMENT=109, SHOW_WS=110, SETTING=111, SETTING_LINE_COMMENT=112, + SETTTING_MULTILINE_COMMENT=113, SETTING_WS=114, LOOKUP_LINE_COMMENT=115, + LOOKUP_MULTILINE_COMMENT=116, LOOKUP_WS=117, LOOKUP_FIELD_LINE_COMMENT=118, + LOOKUP_FIELD_MULTILINE_COMMENT=119, LOOKUP_FIELD_WS=120, USING=121, JOIN_LINE_COMMENT=122, + JOIN_MULTILINE_COMMENT=123, JOIN_WS=124, METRICS_LINE_COMMENT=125, METRICS_MULTILINE_COMMENT=126, + METRICS_WS=127, CLOSING_METRICS_LINE_COMMENT=128, 
CLOSING_METRICS_MULTILINE_COMMENT=129, + CLOSING_METRICS_WS=130; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, RULE_matchBooleanExpression = 7, RULE_valueExpression = 8, RULE_operatorExpression = 9, RULE_primaryExpression = 10, RULE_functionExpression = 11, RULE_functionName = 12, - RULE_dataType = 13, RULE_rowCommand = 14, RULE_fields = 15, RULE_field = 16, - RULE_fromCommand = 17, RULE_indexPattern = 18, RULE_clusterString = 19, - RULE_indexString = 20, RULE_metadata = 21, RULE_metricsCommand = 22, RULE_evalCommand = 23, - RULE_statsCommand = 24, RULE_aggFields = 25, RULE_aggField = 26, RULE_qualifiedName = 27, - RULE_qualifiedNamePattern = 28, RULE_qualifiedNamePatterns = 29, RULE_identifier = 30, - RULE_identifierPattern = 31, RULE_constant = 32, RULE_parameter = 33, - RULE_identifierOrParameter = 34, RULE_limitCommand = 35, RULE_sortCommand = 36, - RULE_orderExpression = 37, RULE_keepCommand = 38, RULE_dropCommand = 39, - RULE_renameCommand = 40, RULE_renameClause = 41, RULE_dissectCommand = 42, - RULE_grokCommand = 43, RULE_mvExpandCommand = 44, RULE_commandOptions = 45, - RULE_commandOption = 46, RULE_booleanValue = 47, RULE_numericValue = 48, - RULE_decimalValue = 49, RULE_integerValue = 50, RULE_string = 51, RULE_comparisonOperator = 52, - RULE_explainCommand = 53, RULE_subqueryExpression = 54, RULE_showCommand = 55, - RULE_enrichCommand = 56, RULE_enrichWithClause = 57, RULE_lookupCommand = 58, - RULE_inlinestatsCommand = 59, RULE_joinCommand = 60, RULE_joinTarget = 61, - RULE_joinCondition = 62, RULE_joinPredicate = 63; + RULE_mapExpression = 13, RULE_entryExpression = 14, RULE_dataType = 15, + RULE_rowCommand = 16, RULE_fields = 17, RULE_field = 18, RULE_fromCommand = 19, + RULE_indexPattern = 20, RULE_clusterString = 21, RULE_indexString = 22, + RULE_metadata = 23, RULE_metricsCommand = 24, 
RULE_evalCommand = 25, RULE_statsCommand = 26, + RULE_aggFields = 27, RULE_aggField = 28, RULE_qualifiedName = 29, RULE_qualifiedNamePattern = 30, + RULE_qualifiedNamePatterns = 31, RULE_identifier = 32, RULE_identifierPattern = 33, + RULE_constant = 34, RULE_parameter = 35, RULE_identifierOrParameter = 36, + RULE_limitCommand = 37, RULE_sortCommand = 38, RULE_orderExpression = 39, + RULE_keepCommand = 40, RULE_dropCommand = 41, RULE_renameCommand = 42, + RULE_renameClause = 43, RULE_dissectCommand = 44, RULE_grokCommand = 45, + RULE_mvExpandCommand = 46, RULE_commandOptions = 47, RULE_commandOption = 48, + RULE_booleanValue = 49, RULE_numericValue = 50, RULE_decimalValue = 51, + RULE_integerValue = 52, RULE_string = 53, RULE_comparisonOperator = 54, + RULE_explainCommand = 55, RULE_subqueryExpression = 56, RULE_showCommand = 57, + RULE_enrichCommand = 58, RULE_enrichWithClause = 59, RULE_lookupCommand = 60, + RULE_inlinestatsCommand = 61, RULE_joinCommand = 62, RULE_joinTarget = 63, + RULE_joinCondition = 64, RULE_joinPredicate = 65; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "matchBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", - "functionName", "dataType", "rowCommand", "fields", "field", "fromCommand", - "indexPattern", "clusterString", "indexString", "metadata", "metricsCommand", - "evalCommand", "statsCommand", "aggFields", "aggField", "qualifiedName", - "qualifiedNamePattern", "qualifiedNamePatterns", "identifier", "identifierPattern", - "constant", "parameter", "identifierOrParameter", "limitCommand", "sortCommand", - "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", - "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", - "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", - 
"string", "comparisonOperator", "explainCommand", "subqueryExpression", + "functionName", "mapExpression", "entryExpression", "dataType", "rowCommand", + "fields", "field", "fromCommand", "indexPattern", "clusterString", "indexString", + "metadata", "metricsCommand", "evalCommand", "statsCommand", "aggFields", + "aggField", "qualifiedName", "qualifiedNamePattern", "qualifiedNamePatterns", + "identifier", "identifierPattern", "constant", "parameter", "identifierOrParameter", + "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", + "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", + "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", + "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause", "lookupCommand", "inlinestatsCommand", "joinCommand", "joinTarget", "joinCondition", "joinPredicate" }; @@ -103,7 +104,7 @@ private static String[] makeLiteralNames() { "'in'", "'is'", "'last'", "'like'", "'('", "'not'", "'null'", "'nulls'", "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'=~'", "'!='", "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, null, - "']'", null, null, null, null, null, null, null, null, "'metadata'", + null, null, "']'", null, null, null, null, null, null, null, null, "'metadata'", null, null, null, null, null, null, null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, null, null, null, null, null, null, null, "'info'", null, null, null, null, null, null, null, null, null, @@ -122,23 +123,23 @@ private static String[] makeSymbolicNames() { "COLON", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "IN", "IS", "LAST", "LIKE", "LP", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", "TRUE", "EQ", "CIEQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", - "SLASH", "PERCENT", "NAMED_OR_POSITIONAL_PARAM", "OPENING_BRACKET", 
"CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", - "METADATA", "UNQUOTED_SOURCE", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", - "FROM_WS", "ID_PATTERN", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", - "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", - "RENAME_WS", "ON", "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", - "ENRICH_MULTILINE_COMMENT", "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", - "ENRICH_FIELD_MULTILINE_COMMENT", "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", - "MVEXPAND_MULTILINE_COMMENT", "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", - "SHOW_MULTILINE_COMMENT", "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", - "SETTTING_MULTILINE_COMMENT", "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", - "LOOKUP_WS", "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", - "LOOKUP_FIELD_WS", "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", - "JOIN_WS", "METRICS_LINE_COMMENT", "METRICS_MULTILINE_COMMENT", "METRICS_WS", - "CLOSING_METRICS_LINE_COMMENT", "CLOSING_METRICS_MULTILINE_COMMENT", - "CLOSING_METRICS_WS" + "SLASH", "PERCENT", "LEFT_BRACES", "RIGHT_BRACES", "NAMED_OR_POSITIONAL_PARAM", + "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", + "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", "EXPLAIN_WS", + "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "METADATA", "UNQUOTED_SOURCE", + "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", "FROM_WS", "ID_PATTERN", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "AS", + "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", "RENAME_WS", "ON", + "WITH", "ENRICH_POLICY_NAME", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", 
"MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "INFO", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS", "SETTING", "SETTING_LINE_COMMENT", "SETTTING_MULTILINE_COMMENT", + "SETTING_WS", "LOOKUP_LINE_COMMENT", "LOOKUP_MULTILINE_COMMENT", "LOOKUP_WS", + "LOOKUP_FIELD_LINE_COMMENT", "LOOKUP_FIELD_MULTILINE_COMMENT", "LOOKUP_FIELD_WS", + "USING", "JOIN_LINE_COMMENT", "JOIN_MULTILINE_COMMENT", "JOIN_WS", "METRICS_LINE_COMMENT", + "METRICS_MULTILINE_COMMENT", "METRICS_WS", "CLOSING_METRICS_LINE_COMMENT", + "CLOSING_METRICS_MULTILINE_COMMENT", "CLOSING_METRICS_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -225,9 +226,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(128); + setState(132); query(0); - setState(129); + setState(133); match(EOF); } } @@ -323,11 +324,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(132); + setState(136); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(139); + setState(143); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -338,16 +339,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(134); + setState(138); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(135); + setState(139); match(PIPE); - setState(136); + setState(140); processingCommand(); } } } - setState(141); + setState(145); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -405,43 +406,43 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new 
SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(148); + setState(152); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,1,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(142); + setState(146); explainCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(143); + setState(147); fromCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(144); + setState(148); rowCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(145); + setState(149); showCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(146); + setState(150); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(147); + setState(151); metricsCommand(); } break; @@ -529,117 +530,117 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(168); + setState(172); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(150); + setState(154); evalCommand(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(151); + setState(155); whereCommand(); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(152); + setState(156); keepCommand(); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(153); + setState(157); limitCommand(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(154); + setState(158); statsCommand(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(155); + setState(159); sortCommand(); } break; case 7: enterOuterAlt(_localctx, 7); { - setState(156); + setState(160); dropCommand(); } break; case 8: enterOuterAlt(_localctx, 8); { - setState(157); + setState(161); renameCommand(); } break; case 9: 
enterOuterAlt(_localctx, 9); { - setState(158); + setState(162); dissectCommand(); } break; case 10: enterOuterAlt(_localctx, 10); { - setState(159); + setState(163); grokCommand(); } break; case 11: enterOuterAlt(_localctx, 11); { - setState(160); + setState(164); enrichCommand(); } break; case 12: enterOuterAlt(_localctx, 12); { - setState(161); + setState(165); mvExpandCommand(); } break; case 13: enterOuterAlt(_localctx, 13); { - setState(162); + setState(166); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(163); + setState(167); inlinestatsCommand(); } break; case 14: enterOuterAlt(_localctx, 14); { - setState(164); + setState(168); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(165); + setState(169); lookupCommand(); } break; case 15: enterOuterAlt(_localctx, 15); { - setState(166); + setState(170); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(167); + setState(171); joinCommand(); } break; @@ -688,9 +689,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(170); + setState(174); match(WHERE); - setState(171); + setState(175); booleanExpression(0); } } @@ -906,7 +907,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(202); + setState(206); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -915,9 +916,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(174); + setState(178); match(NOT); - setState(175); + setState(179); booleanExpression(8); } break; @@ -926,7 +927,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); 
_ctx = _localctx; _prevctx = _localctx; - setState(176); + setState(180); valueExpression(); } break; @@ -935,7 +936,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(177); + setState(181); regexBooleanExpression(); } break; @@ -944,41 +945,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(178); + setState(182); valueExpression(); - setState(180); + setState(184); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(179); + setState(183); match(NOT); } } - setState(182); + setState(186); match(IN); - setState(183); + setState(187); match(LP); - setState(184); + setState(188); valueExpression(); - setState(189); + setState(193); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(185); + setState(189); match(COMMA); - setState(186); + setState(190); valueExpression(); } } - setState(191); + setState(195); _errHandler.sync(this); _la = _input.LA(1); } - setState(192); + setState(196); match(RP); } break; @@ -987,21 +988,21 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(194); + setState(198); valueExpression(); - setState(195); + setState(199); match(IS); - setState(197); + setState(201); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(196); + setState(200); match(NOT); } } - setState(199); + setState(203); match(NULL); } break; @@ -1010,13 +1011,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(201); + setState(205); matchBooleanExpression(); } break; } _ctx.stop = 
_input.LT(-1); - setState(212); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1024,7 +1025,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(210); + setState(214); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -1032,11 +1033,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(204); + setState(208); if (!(precpred(_ctx, 5))) throw new FailedPredicateException(this, "precpred(_ctx, 5)"); - setState(205); + setState(209); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(206); + setState(210); ((LogicalBinaryContext)_localctx).right = booleanExpression(6); } break; @@ -1045,18 +1046,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(207); + setState(211); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(208); + setState(212); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(209); + setState(213); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; } } } - setState(214); + setState(218); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1111,48 +1112,48 @@ public final RegexBooleanExpressionContext 
regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(229); + setState(233); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(215); + setState(219); valueExpression(); - setState(217); + setState(221); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(216); + setState(220); match(NOT); } } - setState(219); + setState(223); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(220); + setState(224); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(222); + setState(226); valueExpression(); - setState(224); + setState(228); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(223); + setState(227); match(NOT); } } - setState(226); + setState(230); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(227); + setState(231); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1212,23 +1213,23 @@ public final MatchBooleanExpressionContext matchBooleanExpression() throws Recog try { enterOuterAlt(_localctx, 1); { - setState(231); + setState(235); ((MatchBooleanExpressionContext)_localctx).fieldExp = qualifiedName(); - setState(234); + setState(238); _errHandler.sync(this); _la = _input.LA(1); if (_la==CAST_OP) { { - setState(232); + setState(236); match(CAST_OP); - setState(233); + setState(237); ((MatchBooleanExpressionContext)_localctx).fieldType = dataType(); } } - setState(236); + setState(240); match(COLON); - setState(237); + setState(241); ((MatchBooleanExpressionContext)_localctx).matchQuery = constant(); } } @@ -1312,14 +1313,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 16, 
RULE_valueExpression); try { - setState(244); + setState(248); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(239); + setState(243); operatorExpression(0); } break; @@ -1327,11 +1328,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(240); + setState(244); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(241); + setState(245); comparisonOperator(); - setState(242); + setState(246); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1456,7 +1457,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(250); + setState(254); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1465,7 +1466,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(247); + setState(251); primaryExpression(0); } break; @@ -1474,7 +1475,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(248); + setState(252); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1485,13 +1486,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(249); + setState(253); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(260); + setState(264); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); while ( _alt!=2 && 
_alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1499,7 +1500,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(258); + setState(262); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) { case 1: @@ -1507,9 +1508,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(252); + setState(256); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(253); + setState(257); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(((((_la - 66)) & ~0x3f) == 0 && ((1L << (_la - 66)) & 7L) != 0)) ) { @@ -1520,7 +1521,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(254); + setState(258); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1529,9 +1530,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(255); + setState(259); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(256); + setState(260); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1542,14 +1543,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE 
_errHandler.reportMatch(this); consume(); } - setState(257); + setState(261); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(262); + setState(266); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,16,_ctx); } @@ -1707,7 +1708,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(271); + setState(275); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) { case 1: @@ -1716,7 +1717,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(264); + setState(268); constant(); } break; @@ -1725,7 +1726,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new DereferenceContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(265); + setState(269); qualifiedName(); } break; @@ -1734,7 +1735,7 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new FunctionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(266); + setState(270); functionExpression(); } break; @@ -1743,17 +1744,17 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc _localctx = new ParenthesizedExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(267); + setState(271); match(LP); - setState(268); + setState(272); booleanExpression(0); - setState(269); + setState(273); match(RP); } break; } _ctx.stop = _input.LT(-1); - setState(278); + setState(282); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1764,16 +1765,16 @@ private PrimaryExpressionContext primaryExpression(int _p) throws RecognitionExc { _localctx = new InlineCastContext(new 
PrimaryExpressionContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_primaryExpression); - setState(273); + setState(277); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(274); + setState(278); match(CAST_OP); - setState(275); + setState(279); dataType(); } } } - setState(280); + setState(284); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,18,_ctx); } @@ -1808,6 +1809,9 @@ public BooleanExpressionContext booleanExpression(int i) { public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); } + public MapExpressionContext mapExpression() { + return getRuleContext(MapExpressionContext.class,0); + } @SuppressWarnings("this-escape") public FunctionExpressionContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -1833,47 +1837,62 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx enterRule(_localctx, 22, RULE_functionExpression); int _la; try { + int _alt; enterOuterAlt(_localctx, 1); { - setState(281); + setState(285); functionName(); - setState(282); + setState(286); match(LP); - setState(292); + setState(300); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: { - setState(283); + setState(287); match(ASTERISK); } break; case 2: { { - setState(284); + setState(288); booleanExpression(0); - setState(289); + setState(293); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(289); + match(COMMA); + setState(290); + booleanExpression(0); + } + } + } + setState(295); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,19,_ctx); + } + setState(298); _errHandler.sync(this); _la = _input.LA(1); - while 
(_la==COMMA) { - { + if (_la==COMMA) { { - setState(285); + setState(296); match(COMMA); - setState(286); - booleanExpression(0); - } + setState(297); + mapExpression(); } - setState(291); - _errHandler.sync(this); - _la = _input.LA(1); } + } } break; } - setState(294); + setState(302); match(RP); } } @@ -1919,7 +1938,7 @@ public final FunctionNameContext functionName() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(296); + setState(304); identifierOrParameter(); } } @@ -1934,6 +1953,140 @@ public final FunctionNameContext functionName() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class MapExpressionContext extends ParserRuleContext { + public TerminalNode LEFT_BRACES() { return getToken(EsqlBaseParser.LEFT_BRACES, 0); } + public List entryExpression() { + return getRuleContexts(EntryExpressionContext.class); + } + public EntryExpressionContext entryExpression(int i) { + return getRuleContext(EntryExpressionContext.class,i); + } + public TerminalNode RIGHT_BRACES() { return getToken(EsqlBaseParser.RIGHT_BRACES, 0); } + public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } + public TerminalNode COMMA(int i) { + return getToken(EsqlBaseParser.COMMA, i); + } + @SuppressWarnings("this-escape") + public MapExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_mapExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterMapExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitMapExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return 
((EsqlBaseParserVisitor)visitor).visitMapExpression(this); + else return visitor.visitChildren(this); + } + } + + public final MapExpressionContext mapExpression() throws RecognitionException { + MapExpressionContext _localctx = new MapExpressionContext(_ctx, getState()); + enterRule(_localctx, 26, RULE_mapExpression); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(306); + if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); + setState(307); + match(LEFT_BRACES); + setState(308); + entryExpression(); + setState(313); + _errHandler.sync(this); + _la = _input.LA(1); + while (_la==COMMA) { + { + { + setState(309); + match(COMMA); + setState(310); + entryExpression(); + } + } + setState(315); + _errHandler.sync(this); + _la = _input.LA(1); + } + setState(316); + match(RIGHT_BRACES); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + @SuppressWarnings("CheckReturnValue") + public static class EntryExpressionContext extends ParserRuleContext { + public StringContext key; + public ConstantContext value; + public TerminalNode COLON() { return getToken(EsqlBaseParser.COLON, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public ConstantContext constant() { + return getRuleContext(ConstantContext.class,0); + } + @SuppressWarnings("this-escape") + public EntryExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_entryExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterEntryExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) 
((EsqlBaseParserListener)listener).exitEntryExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitEntryExpression(this); + else return visitor.visitChildren(this); + } + } + + public final EntryExpressionContext entryExpression() throws RecognitionException { + EntryExpressionContext _localctx = new EntryExpressionContext(_ctx, getState()); + enterRule(_localctx, 28, RULE_entryExpression); + try { + enterOuterAlt(_localctx, 1); + { + setState(318); + ((EntryExpressionContext)_localctx).key = string(); + setState(319); + match(COLON); + setState(320); + ((EntryExpressionContext)_localctx).value = constant(); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class DataTypeContext extends ParserRuleContext { @SuppressWarnings("this-escape") @@ -1972,12 +2125,12 @@ public T accept(ParseTreeVisitor visitor) { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_dataType); + enterRule(_localctx, 30, RULE_dataType); try { _localctx = new ToDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(298); + setState(322); identifier(); } } @@ -2020,13 +2173,13 @@ public T accept(ParseTreeVisitor visitor) { public final RowCommandContext rowCommand() throws RecognitionException { RowCommandContext _localctx = new RowCommandContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_rowCommand); + enterRule(_localctx, 32, RULE_rowCommand); try { enterOuterAlt(_localctx, 1); { - setState(300); + setState(324); match(ROW); - setState(301); + setState(325); fields(); } } @@ -2075,30 +2228,30 @@ public T accept(ParseTreeVisitor visitor) 
{ public final FieldsContext fields() throws RecognitionException { FieldsContext _localctx = new FieldsContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_fields); + enterRule(_localctx, 34, RULE_fields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(303); + setState(327); field(); - setState(308); + setState(332); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,23,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(304); + setState(328); match(COMMA); - setState(305); + setState(329); field(); } } } - setState(310); + setState(334); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,21,_ctx); + _alt = getInterpreter().adaptivePredict(_input,23,_ctx); } } } @@ -2144,23 +2297,23 @@ public T accept(ParseTreeVisitor visitor) { public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_field); + enterRule(_localctx, 36, RULE_field); try { enterOuterAlt(_localctx, 1); { - setState(314); + setState(338); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(311); + setState(335); qualifiedName(); - setState(312); + setState(336); match(ASSIGN); } break; } - setState(316); + setState(340); booleanExpression(0); } } @@ -2213,39 +2366,39 @@ public T accept(ParseTreeVisitor visitor) { public final FromCommandContext fromCommand() throws RecognitionException { FromCommandContext _localctx = new FromCommandContext(_ctx, getState()); - enterRule(_localctx, 34, RULE_fromCommand); + enterRule(_localctx, 38, RULE_fromCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(318); + setState(342); match(FROM); - setState(319); + setState(343); indexPattern(); - 
setState(324); + setState(348); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,25,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(320); + setState(344); match(COMMA); - setState(321); + setState(345); indexPattern(); } } } - setState(326); + setState(350); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,23,_ctx); + _alt = getInterpreter().adaptivePredict(_input,25,_ctx); } - setState(328); + setState(352); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(327); + setState(351); metadata(); } break; @@ -2294,23 +2447,23 @@ public T accept(ParseTreeVisitor visitor) { public final IndexPatternContext indexPattern() throws RecognitionException { IndexPatternContext _localctx = new IndexPatternContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_indexPattern); + enterRule(_localctx, 40, RULE_indexPattern); try { enterOuterAlt(_localctx, 1); { - setState(333); + setState(357); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,27,_ctx) ) { case 1: { - setState(330); + setState(354); clusterString(); - setState(331); + setState(355); match(COLON); } break; } - setState(335); + setState(359); indexString(); } } @@ -2350,11 +2503,11 @@ public T accept(ParseTreeVisitor visitor) { public final ClusterStringContext clusterString() throws RecognitionException { ClusterStringContext _localctx = new ClusterStringContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_clusterString); + enterRule(_localctx, 42, RULE_clusterString); try { enterOuterAlt(_localctx, 1); { - setState(337); + setState(361); match(UNQUOTED_SOURCE); } } @@ -2395,12 +2548,12 @@ public T accept(ParseTreeVisitor 
visitor) { public final IndexStringContext indexString() throws RecognitionException { IndexStringContext _localctx = new IndexStringContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_indexString); + enterRule(_localctx, 44, RULE_indexString); int _la; try { enterOuterAlt(_localctx, 1); { - setState(339); + setState(363); _la = _input.LA(1); if ( !(_la==QUOTED_STRING || _la==UNQUOTED_SOURCE) ) { _errHandler.recoverInline(this); @@ -2456,32 +2609,32 @@ public T accept(ParseTreeVisitor visitor) { public final MetadataContext metadata() throws RecognitionException { MetadataContext _localctx = new MetadataContext(_ctx, getState()); - enterRule(_localctx, 42, RULE_metadata); + enterRule(_localctx, 46, RULE_metadata); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(341); + setState(365); match(METADATA); - setState(342); + setState(366); match(UNQUOTED_SOURCE); - setState(347); + setState(371); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(343); + setState(367); match(COMMA); - setState(344); + setState(368); match(UNQUOTED_SOURCE); } } } - setState(349); + setState(373); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,26,_ctx); + _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } } } @@ -2540,51 +2693,51 @@ public T accept(ParseTreeVisitor visitor) { public final MetricsCommandContext metricsCommand() throws RecognitionException { MetricsCommandContext _localctx = new MetricsCommandContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_metricsCommand); + enterRule(_localctx, 48, RULE_metricsCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(350); + setState(374); match(DEV_METRICS); - setState(351); + setState(375); indexPattern(); - setState(356); + setState(380); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,29,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(352); + setState(376); match(COMMA); - setState(353); + setState(377); indexPattern(); } } } - setState(358); + setState(382); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,27,_ctx); + _alt = getInterpreter().adaptivePredict(_input,29,_ctx); } - setState(360); + setState(384); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,28,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { case 1: { - setState(359); + setState(383); ((MetricsCommandContext)_localctx).aggregates = aggFields(); } break; } - setState(364); + setState(388); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,29,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { case 1: { - setState(362); + setState(386); match(BY); - setState(363); + setState(387); ((MetricsCommandContext)_localctx).grouping = fields(); } break; @@ -2630,13 +2783,13 @@ public T accept(ParseTreeVisitor visitor) { public final EvalCommandContext evalCommand() throws RecognitionException { EvalCommandContext _localctx = new EvalCommandContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_evalCommand); + enterRule(_localctx, 50, RULE_evalCommand); try { enterOuterAlt(_localctx, 1); { - setState(366); + setState(390); match(EVAL); - setState(367); + setState(391); fields(); } } @@ -2685,30 +2838,30 @@ public T accept(ParseTreeVisitor visitor) { public final StatsCommandContext statsCommand() throws RecognitionException { StatsCommandContext _localctx = new StatsCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_statsCommand); + enterRule(_localctx, 52, RULE_statsCommand); try { enterOuterAlt(_localctx, 1); { - setState(369); + setState(393); match(STATS); - setState(371); + 
setState(395); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,30,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { case 1: { - setState(370); + setState(394); ((StatsCommandContext)_localctx).stats = aggFields(); } break; } - setState(375); + setState(399); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,31,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: { - setState(373); + setState(397); match(BY); - setState(374); + setState(398); ((StatsCommandContext)_localctx).grouping = fields(); } break; @@ -2760,30 +2913,30 @@ public T accept(ParseTreeVisitor visitor) { public final AggFieldsContext aggFields() throws RecognitionException { AggFieldsContext _localctx = new AggFieldsContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_aggFields); + enterRule(_localctx, 54, RULE_aggFields); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(377); + setState(401); aggField(); - setState(382); + setState(406); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(378); + setState(402); match(COMMA); - setState(379); + setState(403); aggField(); } } } - setState(384); + setState(408); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,32,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -2829,20 +2982,20 @@ public T accept(ParseTreeVisitor visitor) { public final AggFieldContext aggField() throws RecognitionException { AggFieldContext _localctx = new AggFieldContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_aggField); + enterRule(_localctx, 56, RULE_aggField); try { enterOuterAlt(_localctx, 1); { - setState(385); + setState(409); field(); - setState(388); + setState(412); _errHandler.sync(this); - switch ( 
getInterpreter().adaptivePredict(_input,33,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(386); + setState(410); match(WHERE); - setState(387); + setState(411); booleanExpression(0); } break; @@ -2894,30 +3047,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_qualifiedName); + enterRule(_localctx, 58, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(390); + setState(414); identifierOrParameter(); - setState(395); + setState(419); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(391); + setState(415); match(DOT); - setState(392); + setState(416); identifierOrParameter(); } } } - setState(397); + setState(421); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,34,_ctx); + _alt = getInterpreter().adaptivePredict(_input,36,_ctx); } } } @@ -2966,30 +3119,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_qualifiedNamePattern); + enterRule(_localctx, 60, RULE_qualifiedNamePattern); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(398); + setState(422); identifierPattern(); - setState(403); + setState(427); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(399); + 
setState(423); match(DOT); - setState(400); + setState(424); identifierPattern(); } } } - setState(405); + setState(429); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,35,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } } @@ -3038,30 +3191,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNamePatternsContext qualifiedNamePatterns() throws RecognitionException { QualifiedNamePatternsContext _localctx = new QualifiedNamePatternsContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_qualifiedNamePatterns); + enterRule(_localctx, 62, RULE_qualifiedNamePatterns); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(406); + setState(430); qualifiedNamePattern(); - setState(411); + setState(435); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(407); + setState(431); match(COMMA); - setState(408); + setState(432); qualifiedNamePattern(); } } } - setState(413); + setState(437); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } } } @@ -3102,12 +3255,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_identifier); + enterRule(_localctx, 64, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(414); + setState(438); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -3158,24 +3311,24 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierPatternContext identifierPattern() throws RecognitionException { IdentifierPatternContext 
_localctx = new IdentifierPatternContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_identifierPattern); + enterRule(_localctx, 66, RULE_identifierPattern); try { - setState(419); + setState(443); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,37,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,39,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(416); + setState(440); match(ID_PATTERN); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(417); + setState(441); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(418); + setState(442); parameter(); } break; @@ -3445,17 +3598,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_constant); + enterRule(_localctx, 68, RULE_constant); int _la; try { - setState(463); + setState(487); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(421); + setState(445); match(NULL); } break; @@ -3463,9 +3616,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(422); + setState(446); integerValue(); - setState(423); + setState(447); match(UNQUOTED_IDENTIFIER); } break; @@ -3473,7 +3626,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(425); + setState(449); decimalValue(); } break; @@ -3481,7 +3634,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); 
enterOuterAlt(_localctx, 4); { - setState(426); + setState(450); integerValue(); } break; @@ -3489,7 +3642,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(427); + setState(451); booleanValue(); } break; @@ -3497,7 +3650,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParameterContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(428); + setState(452); parameter(); } break; @@ -3505,7 +3658,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(429); + setState(453); string(); } break; @@ -3513,27 +3666,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(430); + setState(454); match(OPENING_BRACKET); - setState(431); + setState(455); numericValue(); - setState(436); + setState(460); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(432); + setState(456); match(COMMA); - setState(433); + setState(457); numericValue(); } } - setState(438); + setState(462); _errHandler.sync(this); _la = _input.LA(1); } - setState(439); + setState(463); match(CLOSING_BRACKET); } break; @@ -3541,27 +3694,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(441); + setState(465); match(OPENING_BRACKET); - setState(442); + setState(466); booleanValue(); - setState(447); + setState(471); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(443); + setState(467); match(COMMA); - setState(444); + setState(468); booleanValue(); } } - setState(449); + setState(473); _errHandler.sync(this); _la = _input.LA(1); } - 
setState(450); + setState(474); match(CLOSING_BRACKET); } break; @@ -3569,27 +3722,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(452); + setState(476); match(OPENING_BRACKET); - setState(453); + setState(477); string(); - setState(458); + setState(482); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(454); + setState(478); match(COMMA); - setState(455); + setState(479); string(); } } - setState(460); + setState(484); _errHandler.sync(this); _la = _input.LA(1); } - setState(461); + setState(485); match(CLOSING_BRACKET); } break; @@ -3661,16 +3814,16 @@ public T accept(ParseTreeVisitor visitor) { public final ParameterContext parameter() throws RecognitionException { ParameterContext _localctx = new ParameterContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_parameter); + enterRule(_localctx, 70, RULE_parameter); try { - setState(467); + setState(491); _errHandler.sync(this); switch (_input.LA(1)) { case PARAM: _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(465); + setState(489); match(PARAM); } break; @@ -3678,7 +3831,7 @@ public final ParameterContext parameter() throws RecognitionException { _localctx = new InputNamedOrPositionalParamContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(466); + setState(490); match(NAMED_OR_POSITIONAL_PARAM); } break; @@ -3727,24 +3880,24 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierOrParameterContext identifierOrParameter() throws RecognitionException { IdentifierOrParameterContext _localctx = new IdentifierOrParameterContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_identifierOrParameter); + enterRule(_localctx, 72, RULE_identifierOrParameter); try { - setState(472); + setState(496); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( 
getInterpreter().adaptivePredict(_input,45,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(469); + setState(493); identifier(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(470); + setState(494); if (!(this.isDevVersion())) throw new FailedPredicateException(this, "this.isDevVersion()"); - setState(471); + setState(495); parameter(); } break; @@ -3787,13 +3940,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_limitCommand); + enterRule(_localctx, 74, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(474); + setState(498); match(LIMIT); - setState(475); + setState(499); match(INTEGER_LITERAL); } } @@ -3843,32 +3996,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_sortCommand); + enterRule(_localctx, 76, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(477); + setState(501); match(SORT); - setState(478); + setState(502); orderExpression(); - setState(483); + setState(507); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(479); + setState(503); match(COMMA); - setState(480); + setState(504); orderExpression(); } } } - setState(485); + setState(509); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,44,_ctx); + _alt = getInterpreter().adaptivePredict(_input,46,_ctx); } } } @@ -3917,19 +4070,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws 
RecognitionException { OrderExpressionContext _localctx = new OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_orderExpression); + enterRule(_localctx, 78, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(486); + setState(510); booleanExpression(0); - setState(488); + setState(512); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,45,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1: { - setState(487); + setState(511); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3943,14 +4096,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(492); + setState(516); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(490); + setState(514); match(NULLS); - setState(491); + setState(515); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -4005,13 +4158,13 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_keepCommand); + enterRule(_localctx, 80, RULE_keepCommand); try { enterOuterAlt(_localctx, 1); { - setState(494); + setState(518); match(KEEP); - setState(495); + setState(519); qualifiedNamePatterns(); } } @@ -4054,13 +4207,13 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_dropCommand); + enterRule(_localctx, 82, RULE_dropCommand); try { enterOuterAlt(_localctx, 1); { - setState(497); + 
setState(521); match(DROP); - setState(498); + setState(522); qualifiedNamePatterns(); } } @@ -4110,32 +4263,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_renameCommand); + enterRule(_localctx, 84, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(500); + setState(524); match(RENAME); - setState(501); + setState(525); renameClause(); - setState(506); + setState(530); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(502); + setState(526); match(COMMA); - setState(503); + setState(527); renameClause(); } } } - setState(508); + setState(532); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,47,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } } } @@ -4183,15 +4336,15 @@ public T accept(ParseTreeVisitor visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_renameClause); + enterRule(_localctx, 86, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(509); + setState(533); ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); - setState(510); + setState(534); match(AS); - setState(511); + setState(535); ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } @@ -4240,22 +4393,22 @@ public T accept(ParseTreeVisitor visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_dissectCommand); 
+ enterRule(_localctx, 88, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(513); + setState(537); match(DISSECT); - setState(514); + setState(538); primaryExpression(0); - setState(515); + setState(539); string(); - setState(517); + setState(541); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(516); + setState(540); commandOptions(); } break; @@ -4304,15 +4457,15 @@ public T accept(ParseTreeVisitor visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_grokCommand); + enterRule(_localctx, 90, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(519); + setState(543); match(GROK); - setState(520); + setState(544); primaryExpression(0); - setState(521); + setState(545); string(); } } @@ -4355,13 +4508,13 @@ public T accept(ParseTreeVisitor visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_mvExpandCommand); + enterRule(_localctx, 92, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(523); + setState(547); match(MV_EXPAND); - setState(524); + setState(548); qualifiedName(); } } @@ -4410,30 +4563,30 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_commandOptions); + enterRule(_localctx, 94, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(526); + setState(550); commandOption(); - setState(531); + setState(555); _errHandler.sync(this); - _alt = 
getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,51,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(527); + setState(551); match(COMMA); - setState(528); + setState(552); commandOption(); } } } - setState(533); + setState(557); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,49,_ctx); + _alt = getInterpreter().adaptivePredict(_input,51,_ctx); } } } @@ -4479,15 +4632,15 @@ public T accept(ParseTreeVisitor visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_commandOption); + enterRule(_localctx, 96, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(534); + setState(558); identifier(); - setState(535); + setState(559); match(ASSIGN); - setState(536); + setState(560); constant(); } } @@ -4528,12 +4681,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 94, RULE_booleanValue); + enterRule(_localctx, 98, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(538); + setState(562); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4586,22 +4739,22 @@ public T accept(ParseTreeVisitor visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 96, RULE_numericValue); + enterRule(_localctx, 100, RULE_numericValue); try { - setState(542); + setState(566); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,52,_ctx) ) { case 1: 
enterOuterAlt(_localctx, 1); { - setState(540); + setState(564); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(541); + setState(565); integerValue(); } break; @@ -4645,17 +4798,17 @@ public T accept(ParseTreeVisitor visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 98, RULE_decimalValue); + enterRule(_localctx, 102, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(545); + setState(569); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(544); + setState(568); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4668,7 +4821,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(547); + setState(571); match(DECIMAL_LITERAL); } } @@ -4710,17 +4863,17 @@ public T accept(ParseTreeVisitor visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 100, RULE_integerValue); + enterRule(_localctx, 104, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(550); + setState(574); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(549); + setState(573); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -4733,7 +4886,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(552); + setState(576); match(INTEGER_LITERAL); } } @@ -4773,11 +4926,11 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 102, RULE_string); + enterRule(_localctx, 106, RULE_string); try { 
enterOuterAlt(_localctx, 1); { - setState(554); + setState(578); match(QUOTED_STRING); } } @@ -4822,12 +4975,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 104, RULE_comparisonOperator); + enterRule(_localctx, 108, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(556); + setState(580); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & -432345564227567616L) != 0)) ) { _errHandler.recoverInline(this); @@ -4878,13 +5031,13 @@ public T accept(ParseTreeVisitor visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 106, RULE_explainCommand); + enterRule(_localctx, 110, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(558); + setState(582); match(EXPLAIN); - setState(559); + setState(583); subqueryExpression(); } } @@ -4928,15 +5081,15 @@ public T accept(ParseTreeVisitor visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 108, RULE_subqueryExpression); + enterRule(_localctx, 112, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(561); + setState(585); match(OPENING_BRACKET); - setState(562); + setState(586); query(0); - setState(563); + setState(587); match(CLOSING_BRACKET); } } @@ -4988,14 +5141,14 @@ public T accept(ParseTreeVisitor visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 110, RULE_showCommand); + enterRule(_localctx, 114, RULE_showCommand); 
try { _localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(565); + setState(589); match(SHOW); - setState(566); + setState(590); match(INFO); } } @@ -5053,53 +5206,53 @@ public T accept(ParseTreeVisitor visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 112, RULE_enrichCommand); + enterRule(_localctx, 116, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(568); + setState(592); match(ENRICH); - setState(569); + setState(593); ((EnrichCommandContext)_localctx).policyName = match(ENRICH_POLICY_NAME); - setState(572); + setState(596); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,53,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { case 1: { - setState(570); + setState(594); match(ON); - setState(571); + setState(595); ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(583); + setState(607); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,55,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { case 1: { - setState(574); + setState(598); match(WITH); - setState(575); + setState(599); enrichWithClause(); - setState(580); + setState(604); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,54,_ctx); + _alt = getInterpreter().adaptivePredict(_input,56,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(576); + setState(600); match(COMMA); - setState(577); + setState(601); enrichWithClause(); } } } - setState(582); + setState(606); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,54,_ctx); + _alt = getInterpreter().adaptivePredict(_input,56,_ctx); } } break; @@ -5150,23 +5303,23 @@ public T accept(ParseTreeVisitor visitor) { 
public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 114, RULE_enrichWithClause); + enterRule(_localctx, 118, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(588); + setState(612); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,56,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,58,_ctx) ) { case 1: { - setState(585); + setState(609); ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); - setState(586); + setState(610); match(ASSIGN); } break; } - setState(590); + setState(614); ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } @@ -5215,17 +5368,17 @@ public T accept(ParseTreeVisitor visitor) { public final LookupCommandContext lookupCommand() throws RecognitionException { LookupCommandContext _localctx = new LookupCommandContext(_ctx, getState()); - enterRule(_localctx, 116, RULE_lookupCommand); + enterRule(_localctx, 120, RULE_lookupCommand); try { enterOuterAlt(_localctx, 1); { - setState(592); + setState(616); match(DEV_LOOKUP); - setState(593); + setState(617); ((LookupCommandContext)_localctx).tableName = indexPattern(); - setState(594); + setState(618); match(ON); - setState(595); + setState(619); ((LookupCommandContext)_localctx).matchFields = qualifiedNamePatterns(); } } @@ -5274,22 +5427,22 @@ public T accept(ParseTreeVisitor visitor) { public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionException { InlinestatsCommandContext _localctx = new InlinestatsCommandContext(_ctx, getState()); - enterRule(_localctx, 118, RULE_inlinestatsCommand); + enterRule(_localctx, 122, RULE_inlinestatsCommand); try { enterOuterAlt(_localctx, 1); { - setState(597); + setState(621); match(DEV_INLINESTATS); - setState(598); + setState(622); ((InlinestatsCommandContext)_localctx).stats = aggFields(); - 
setState(601); + setState(625); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,57,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { case 1: { - setState(599); + setState(623); match(BY); - setState(600); + setState(624); ((InlinestatsCommandContext)_localctx).grouping = fields(); } break; @@ -5342,17 +5495,17 @@ public T accept(ParseTreeVisitor visitor) { public final JoinCommandContext joinCommand() throws RecognitionException { JoinCommandContext _localctx = new JoinCommandContext(_ctx, getState()); - enterRule(_localctx, 120, RULE_joinCommand); + enterRule(_localctx, 124, RULE_joinCommand); int _la; try { enterOuterAlt(_localctx, 1); { - setState(604); + setState(628); _errHandler.sync(this); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) { { - setState(603); + setState(627); ((JoinCommandContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & 29360128L) != 0)) ) { @@ -5366,11 +5519,11 @@ public final JoinCommandContext joinCommand() throws RecognitionException { } } - setState(606); + setState(630); match(DEV_JOIN); - setState(607); + setState(631); joinTarget(); - setState(608); + setState(632); joinCondition(); } } @@ -5418,21 +5571,21 @@ public T accept(ParseTreeVisitor visitor) { public final JoinTargetContext joinTarget() throws RecognitionException { JoinTargetContext _localctx = new JoinTargetContext(_ctx, getState()); - enterRule(_localctx, 122, RULE_joinTarget); + enterRule(_localctx, 126, RULE_joinTarget); int _la; try { enterOuterAlt(_localctx, 1); { - setState(610); + setState(634); ((JoinTargetContext)_localctx).index = identifier(); - setState(613); + setState(637); _errHandler.sync(this); _la = _input.LA(1); if (_la==AS) { { - setState(611); + setState(635); match(AS); - setState(612); + setState(636); ((JoinTargetContext)_localctx).alias = identifier(); } } @@ -5485,32 +5638,32 @@ public T 
accept(ParseTreeVisitor visitor) { public final JoinConditionContext joinCondition() throws RecognitionException { JoinConditionContext _localctx = new JoinConditionContext(_ctx, getState()); - enterRule(_localctx, 124, RULE_joinCondition); + enterRule(_localctx, 128, RULE_joinCondition); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(615); + setState(639); match(ON); - setState(616); + setState(640); joinPredicate(); - setState(621); + setState(645); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + _alt = getInterpreter().adaptivePredict(_input,62,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(617); + setState(641); match(COMMA); - setState(618); + setState(642); joinPredicate(); } } } - setState(623); + setState(647); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,60,_ctx); + _alt = getInterpreter().adaptivePredict(_input,62,_ctx); } } } @@ -5552,11 +5705,11 @@ public T accept(ParseTreeVisitor visitor) { public final JoinPredicateContext joinPredicate() throws RecognitionException { JoinPredicateContext _localctx = new JoinPredicateContext(_ctx, getState()); - enterRule(_localctx, 126, RULE_joinPredicate); + enterRule(_localctx, 130, RULE_joinPredicate); try { enterOuterAlt(_localctx, 1); { - setState(624); + setState(648); valueExpression(); } } @@ -5585,9 +5738,11 @@ public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { return operatorExpression_sempred((OperatorExpressionContext)_localctx, predIndex); case 10: return primaryExpression_sempred((PrimaryExpressionContext)_localctx, predIndex); - case 31: + case 13: + return mapExpression_sempred((MapExpressionContext)_localctx, predIndex); + case 33: return identifierPattern_sempred((IdentifierPatternContext)_localctx, predIndex); - case 34: + case 36: return identifierOrParameter_sempred((IdentifierOrParameterContext)_localctx, predIndex); } return 
true; @@ -5642,23 +5797,30 @@ private boolean primaryExpression_sempred(PrimaryExpressionContext _localctx, in } return true; } - private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { + private boolean mapExpression_sempred(MapExpressionContext _localctx, int predIndex) { switch (predIndex) { case 10: return this.isDevVersion(); } return true; } - private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { + private boolean identifierPattern_sempred(IdentifierPatternContext _localctx, int predIndex) { switch (predIndex) { case 11: return this.isDevVersion(); } return true; } + private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _localctx, int predIndex) { + switch (predIndex) { + case 12: + return this.isDevVersion(); + } + return true; + } public static final String _serializedATN = - "\u0004\u0001\u0080\u0273\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ + "\u0004\u0001\u0082\u028b\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001"+ "\u0002\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004"+ "\u0002\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007"+ "\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b"+ @@ -5674,385 +5836,401 @@ private boolean identifierOrParameter_sempred(IdentifierOrParameterContext _loca ",\u0002-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u0007"+ "1\u00022\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u0007"+ "6\u00027\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007"+ - ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0005\u0001\u008a\b\u0001\n\u0001\f\u0001\u008d"+ - "\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ - 
"\u0002\u0003\u0002\u0095\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001"+ - "\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u00a9\b\u0003\u0001\u0004\u0001"+ - "\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00b5\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00bc\b\u0005\n"+ - "\u0005\f\u0005\u00bf\t\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0003\u0005\u00c6\b\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0003\u0005\u00cb\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d3\b\u0005\n\u0005\f\u0005"+ - "\u00d6\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00da\b\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e1"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00e6\b\u0006"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00eb\b\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ - "\b\u00f5\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00fb\b\t\u0001\t"+ - "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0103\b\t\n\t\f\t\u0106"+ - "\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0003"+ - "\n\u0110\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0115\b\n\n\n\f\n\u0118\t"+ - "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0005\u000b\u0120\b\u000b\n\u000b\f\u000b\u0123\t\u000b\u0003\u000b\u0125"+ - "\b\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001"+ - "\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ - "\u000f\u0133\b\u000f\n\u000f\f\u000f\u0136\t\u000f\u0001\u0010\u0001\u0010"+ - 
"\u0001\u0010\u0003\u0010\u013b\b\u0010\u0001\u0010\u0001\u0010\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0005\u0011\u0143\b\u0011\n\u0011"+ - "\f\u0011\u0146\t\u0011\u0001\u0011\u0003\u0011\u0149\b\u0011\u0001\u0012"+ - "\u0001\u0012\u0001\u0012\u0003\u0012\u014e\b\u0012\u0001\u0012\u0001\u0012"+ - "\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015"+ - "\u0001\u0015\u0001\u0015\u0005\u0015\u015a\b\u0015\n\u0015\f\u0015\u015d"+ - "\t\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0005\u0016\u0163"+ - "\b\u0016\n\u0016\f\u0016\u0166\t\u0016\u0001\u0016\u0003\u0016\u0169\b"+ - "\u0016\u0001\u0016\u0001\u0016\u0003\u0016\u016d\b\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0003\u0018\u0174\b\u0018\u0001"+ - "\u0018\u0001\u0018\u0003\u0018\u0178\b\u0018\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0005\u0019\u017d\b\u0019\n\u0019\f\u0019\u0180\t\u0019\u0001\u001a"+ - "\u0001\u001a\u0001\u001a\u0003\u001a\u0185\b\u001a\u0001\u001b\u0001\u001b"+ - "\u0001\u001b\u0005\u001b\u018a\b\u001b\n\u001b\f\u001b\u018d\t\u001b\u0001"+ - "\u001c\u0001\u001c\u0001\u001c\u0005\u001c\u0192\b\u001c\n\u001c\f\u001c"+ - "\u0195\t\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019a\b"+ - "\u001d\n\u001d\f\u001d\u019d\t\u001d\u0001\u001e\u0001\u001e\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0003\u001f\u01a4\b\u001f\u0001 \u0001 \u0001"+ - " \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ - " \u0005 \u01b3\b \n \f \u01b6\t \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ - " \u0005 \u01be\b \n \f \u01c1\t \u0001 \u0001 \u0001 \u0001 \u0001 \u0001"+ - " \u0005 \u01c9\b \n \f \u01cc\t \u0001 \u0001 \u0003 \u01d0\b \u0001!"+ - "\u0001!\u0003!\u01d4\b!\u0001\"\u0001\"\u0001\"\u0003\"\u01d9\b\"\u0001"+ - "#\u0001#\u0001#\u0001$\u0001$\u0001$\u0001$\u0005$\u01e2\b$\n$\f$\u01e5"+ - "\t$\u0001%\u0001%\u0003%\u01e9\b%\u0001%\u0001%\u0003%\u01ed\b%\u0001"+ - 
"&\u0001&\u0001&\u0001\'\u0001\'\u0001\'\u0001(\u0001(\u0001(\u0001(\u0005"+ - "(\u01f9\b(\n(\f(\u01fc\t(\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ - "*\u0001*\u0003*\u0206\b*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001"+ - ",\u0001-\u0001-\u0001-\u0005-\u0212\b-\n-\f-\u0215\t-\u0001.\u0001.\u0001"+ - ".\u0001.\u0001/\u0001/\u00010\u00010\u00030\u021f\b0\u00011\u00031\u0222"+ - "\b1\u00011\u00011\u00012\u00032\u0227\b2\u00012\u00012\u00013\u00013\u0001"+ - "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00017\u0001"+ - "7\u00017\u00018\u00018\u00018\u00018\u00038\u023d\b8\u00018\u00018\u0001"+ - "8\u00018\u00058\u0243\b8\n8\f8\u0246\t8\u00038\u0248\b8\u00019\u00019"+ - "\u00019\u00039\u024d\b9\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0001"+ - ":\u0001;\u0001;\u0001;\u0001;\u0003;\u025a\b;\u0001<\u0003<\u025d\b<\u0001"+ - "<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001=\u0003=\u0266\b=\u0001>\u0001"+ - ">\u0001>\u0001>\u0005>\u026c\b>\n>\f>\u026f\t>\u0001?\u0001?\u0001?\u0000"+ - "\u0004\u0002\n\u0012\u0014@\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ - "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ - "TVXZ\\^`bdfhjlnprtvxz|~\u0000\t\u0001\u0000@A\u0001\u0000BD\u0002\u0000"+ - "\u001e\u001eQQ\u0001\u0000HI\u0002\u0000##((\u0002\u0000++..\u0002\u0000"+ - "**88\u0002\u000099;?\u0001\u0000\u0016\u0018\u028e\u0000\u0080\u0001\u0000"+ - "\u0000\u0000\u0002\u0083\u0001\u0000\u0000\u0000\u0004\u0094\u0001\u0000"+ - "\u0000\u0000\u0006\u00a8\u0001\u0000\u0000\u0000\b\u00aa\u0001\u0000\u0000"+ - "\u0000\n\u00ca\u0001\u0000\u0000\u0000\f\u00e5\u0001\u0000\u0000\u0000"+ - "\u000e\u00e7\u0001\u0000\u0000\u0000\u0010\u00f4\u0001\u0000\u0000\u0000"+ - "\u0012\u00fa\u0001\u0000\u0000\u0000\u0014\u010f\u0001\u0000\u0000\u0000"+ - "\u0016\u0119\u0001\u0000\u0000\u0000\u0018\u0128\u0001\u0000\u0000\u0000"+ - "\u001a\u012a\u0001\u0000\u0000\u0000\u001c\u012c\u0001\u0000\u0000\u0000"+ - "\u001e\u012f\u0001\u0000\u0000\u0000 
\u013a\u0001\u0000\u0000\u0000\""+ - "\u013e\u0001\u0000\u0000\u0000$\u014d\u0001\u0000\u0000\u0000&\u0151\u0001"+ - "\u0000\u0000\u0000(\u0153\u0001\u0000\u0000\u0000*\u0155\u0001\u0000\u0000"+ - "\u0000,\u015e\u0001\u0000\u0000\u0000.\u016e\u0001\u0000\u0000\u00000"+ - "\u0171\u0001\u0000\u0000\u00002\u0179\u0001\u0000\u0000\u00004\u0181\u0001"+ - "\u0000\u0000\u00006\u0186\u0001\u0000\u0000\u00008\u018e\u0001\u0000\u0000"+ - "\u0000:\u0196\u0001\u0000\u0000\u0000<\u019e\u0001\u0000\u0000\u0000>"+ - "\u01a3\u0001\u0000\u0000\u0000@\u01cf\u0001\u0000\u0000\u0000B\u01d3\u0001"+ - "\u0000\u0000\u0000D\u01d8\u0001\u0000\u0000\u0000F\u01da\u0001\u0000\u0000"+ - "\u0000H\u01dd\u0001\u0000\u0000\u0000J\u01e6\u0001\u0000\u0000\u0000L"+ - "\u01ee\u0001\u0000\u0000\u0000N\u01f1\u0001\u0000\u0000\u0000P\u01f4\u0001"+ - "\u0000\u0000\u0000R\u01fd\u0001\u0000\u0000\u0000T\u0201\u0001\u0000\u0000"+ - "\u0000V\u0207\u0001\u0000\u0000\u0000X\u020b\u0001\u0000\u0000\u0000Z"+ - "\u020e\u0001\u0000\u0000\u0000\\\u0216\u0001\u0000\u0000\u0000^\u021a"+ - "\u0001\u0000\u0000\u0000`\u021e\u0001\u0000\u0000\u0000b\u0221\u0001\u0000"+ - "\u0000\u0000d\u0226\u0001\u0000\u0000\u0000f\u022a\u0001\u0000\u0000\u0000"+ - "h\u022c\u0001\u0000\u0000\u0000j\u022e\u0001\u0000\u0000\u0000l\u0231"+ - "\u0001\u0000\u0000\u0000n\u0235\u0001\u0000\u0000\u0000p\u0238\u0001\u0000"+ - "\u0000\u0000r\u024c\u0001\u0000\u0000\u0000t\u0250\u0001\u0000\u0000\u0000"+ - "v\u0255\u0001\u0000\u0000\u0000x\u025c\u0001\u0000\u0000\u0000z\u0262"+ - "\u0001\u0000\u0000\u0000|\u0267\u0001\u0000\u0000\u0000~\u0270\u0001\u0000"+ - "\u0000\u0000\u0080\u0081\u0003\u0002\u0001\u0000\u0081\u0082\u0005\u0000"+ - "\u0000\u0001\u0082\u0001\u0001\u0000\u0000\u0000\u0083\u0084\u0006\u0001"+ - "\uffff\uffff\u0000\u0084\u0085\u0003\u0004\u0002\u0000\u0085\u008b\u0001"+ - "\u0000\u0000\u0000\u0086\u0087\n\u0001\u0000\u0000\u0087\u0088\u0005\u001d"+ - "\u0000\u0000\u0088\u008a\u0003\u0006\u0003\u0000\u0089\u0086\u0001\u0000"+ - 
"\u0000\u0000\u008a\u008d\u0001\u0000\u0000\u0000\u008b\u0089\u0001\u0000"+ - "\u0000\u0000\u008b\u008c\u0001\u0000\u0000\u0000\u008c\u0003\u0001\u0000"+ - "\u0000\u0000\u008d\u008b\u0001\u0000\u0000\u0000\u008e\u0095\u0003j5\u0000"+ - "\u008f\u0095\u0003\"\u0011\u0000\u0090\u0095\u0003\u001c\u000e\u0000\u0091"+ - "\u0095\u0003n7\u0000\u0092\u0093\u0004\u0002\u0001\u0000\u0093\u0095\u0003"+ - ",\u0016\u0000\u0094\u008e\u0001\u0000\u0000\u0000\u0094\u008f\u0001\u0000"+ - "\u0000\u0000\u0094\u0090\u0001\u0000\u0000\u0000\u0094\u0091\u0001\u0000"+ - "\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0095\u0005\u0001\u0000"+ - "\u0000\u0000\u0096\u00a9\u0003.\u0017\u0000\u0097\u00a9\u0003\b\u0004"+ - "\u0000\u0098\u00a9\u0003L&\u0000\u0099\u00a9\u0003F#\u0000\u009a\u00a9"+ - "\u00030\u0018\u0000\u009b\u00a9\u0003H$\u0000\u009c\u00a9\u0003N\'\u0000"+ - "\u009d\u00a9\u0003P(\u0000\u009e\u00a9\u0003T*\u0000\u009f\u00a9\u0003"+ - "V+\u0000\u00a0\u00a9\u0003p8\u0000\u00a1\u00a9\u0003X,\u0000\u00a2\u00a3"+ - "\u0004\u0003\u0002\u0000\u00a3\u00a9\u0003v;\u0000\u00a4\u00a5\u0004\u0003"+ - "\u0003\u0000\u00a5\u00a9\u0003t:\u0000\u00a6\u00a7\u0004\u0003\u0004\u0000"+ - "\u00a7\u00a9\u0003x<\u0000\u00a8\u0096\u0001\u0000\u0000\u0000\u00a8\u0097"+ - "\u0001\u0000\u0000\u0000\u00a8\u0098\u0001\u0000\u0000\u0000\u00a8\u0099"+ - "\u0001\u0000\u0000\u0000\u00a8\u009a\u0001\u0000\u0000\u0000\u00a8\u009b"+ - "\u0001\u0000\u0000\u0000\u00a8\u009c\u0001\u0000\u0000\u0000\u00a8\u009d"+ - "\u0001\u0000\u0000\u0000\u00a8\u009e\u0001\u0000\u0000\u0000\u00a8\u009f"+ - "\u0001\u0000\u0000\u0000\u00a8\u00a0\u0001\u0000\u0000\u0000\u00a8\u00a1"+ - "\u0001\u0000\u0000\u0000\u00a8\u00a2\u0001\u0000\u0000\u0000\u00a8\u00a4"+ - "\u0001\u0000\u0000\u0000\u00a8\u00a6\u0001\u0000\u0000\u0000\u00a9\u0007"+ - "\u0001\u0000\u0000\u0000\u00aa\u00ab\u0005\u0010\u0000\u0000\u00ab\u00ac"+ - "\u0003\n\u0005\u0000\u00ac\t\u0001\u0000\u0000\u0000\u00ad\u00ae\u0006"+ - 
"\u0005\uffff\uffff\u0000\u00ae\u00af\u00051\u0000\u0000\u00af\u00cb\u0003"+ - "\n\u0005\b\u00b0\u00cb\u0003\u0010\b\u0000\u00b1\u00cb\u0003\f\u0006\u0000"+ - "\u00b2\u00b4\u0003\u0010\b\u0000\u00b3\u00b5\u00051\u0000\u0000\u00b4"+ - "\u00b3\u0001\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5"+ - "\u00b6\u0001\u0000\u0000\u0000\u00b6\u00b7\u0005,\u0000\u0000\u00b7\u00b8"+ - "\u00050\u0000\u0000\u00b8\u00bd\u0003\u0010\b\u0000\u00b9\u00ba\u0005"+ - "\'\u0000\u0000\u00ba\u00bc\u0003\u0010\b\u0000\u00bb\u00b9\u0001\u0000"+ - "\u0000\u0000\u00bc\u00bf\u0001\u0000\u0000\u0000\u00bd\u00bb\u0001\u0000"+ - "\u0000\u0000\u00bd\u00be\u0001\u0000\u0000\u0000\u00be\u00c0\u0001\u0000"+ - "\u0000\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c1\u00057\u0000"+ - "\u0000\u00c1\u00cb\u0001\u0000\u0000\u0000\u00c2\u00c3\u0003\u0010\b\u0000"+ - "\u00c3\u00c5\u0005-\u0000\u0000\u00c4\u00c6\u00051\u0000\u0000\u00c5\u00c4"+ - "\u0001\u0000\u0000\u0000\u00c5\u00c6\u0001\u0000\u0000\u0000\u00c6\u00c7"+ - "\u0001\u0000\u0000\u0000\u00c7\u00c8\u00052\u0000\u0000\u00c8\u00cb\u0001"+ - "\u0000\u0000\u0000\u00c9\u00cb\u0003\u000e\u0007\u0000\u00ca\u00ad\u0001"+ - "\u0000\u0000\u0000\u00ca\u00b0\u0001\u0000\u0000\u0000\u00ca\u00b1\u0001"+ - "\u0000\u0000\u0000\u00ca\u00b2\u0001\u0000\u0000\u0000\u00ca\u00c2\u0001"+ - "\u0000\u0000\u0000\u00ca\u00c9\u0001\u0000\u0000\u0000\u00cb\u00d4\u0001"+ - "\u0000\u0000\u0000\u00cc\u00cd\n\u0005\u0000\u0000\u00cd\u00ce\u0005\""+ - "\u0000\u0000\u00ce\u00d3\u0003\n\u0005\u0006\u00cf\u00d0\n\u0004\u0000"+ - "\u0000\u00d0\u00d1\u00054\u0000\u0000\u00d1\u00d3\u0003\n\u0005\u0005"+ - "\u00d2\u00cc\u0001\u0000\u0000\u0000\u00d2\u00cf\u0001\u0000\u0000\u0000"+ - "\u00d3\u00d6\u0001\u0000\u0000\u0000\u00d4\u00d2\u0001\u0000\u0000\u0000"+ - "\u00d4\u00d5\u0001\u0000\u0000\u0000\u00d5\u000b\u0001\u0000\u0000\u0000"+ - "\u00d6\u00d4\u0001\u0000\u0000\u0000\u00d7\u00d9\u0003\u0010\b\u0000\u00d8"+ - 
"\u00da\u00051\u0000\u0000\u00d9\u00d8\u0001\u0000\u0000\u0000\u00d9\u00da"+ - "\u0001\u0000\u0000\u0000\u00da\u00db\u0001\u0000\u0000\u0000\u00db\u00dc"+ - "\u0005/\u0000\u0000\u00dc\u00dd\u0003f3\u0000\u00dd\u00e6\u0001\u0000"+ - "\u0000\u0000\u00de\u00e0\u0003\u0010\b\u0000\u00df\u00e1\u00051\u0000"+ - "\u0000\u00e0\u00df\u0001\u0000\u0000\u0000\u00e0\u00e1\u0001\u0000\u0000"+ - "\u0000\u00e1\u00e2\u0001\u0000\u0000\u0000\u00e2\u00e3\u00056\u0000\u0000"+ - "\u00e3\u00e4\u0003f3\u0000\u00e4\u00e6\u0001\u0000\u0000\u0000\u00e5\u00d7"+ - "\u0001\u0000\u0000\u0000\u00e5\u00de\u0001\u0000\u0000\u0000\u00e6\r\u0001"+ - "\u0000\u0000\u0000\u00e7\u00ea\u00036\u001b\u0000\u00e8\u00e9\u0005%\u0000"+ - "\u0000\u00e9\u00eb\u0003\u001a\r\u0000\u00ea\u00e8\u0001\u0000\u0000\u0000"+ - "\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb\u00ec\u0001\u0000\u0000\u0000"+ - "\u00ec\u00ed\u0005&\u0000\u0000\u00ed\u00ee\u0003@ \u0000\u00ee\u000f"+ - "\u0001\u0000\u0000\u0000\u00ef\u00f5\u0003\u0012\t\u0000\u00f0\u00f1\u0003"+ - "\u0012\t\u0000\u00f1\u00f2\u0003h4\u0000\u00f2\u00f3\u0003\u0012\t\u0000"+ - "\u00f3\u00f5\u0001\u0000\u0000\u0000\u00f4\u00ef\u0001\u0000\u0000\u0000"+ - "\u00f4\u00f0\u0001\u0000\u0000\u0000\u00f5\u0011\u0001\u0000\u0000\u0000"+ - "\u00f6\u00f7\u0006\t\uffff\uffff\u0000\u00f7\u00fb\u0003\u0014\n\u0000"+ - "\u00f8\u00f9\u0007\u0000\u0000\u0000\u00f9\u00fb\u0003\u0012\t\u0003\u00fa"+ - "\u00f6\u0001\u0000\u0000\u0000\u00fa\u00f8\u0001\u0000\u0000\u0000\u00fb"+ - "\u0104\u0001\u0000\u0000\u0000\u00fc\u00fd\n\u0002\u0000\u0000\u00fd\u00fe"+ - "\u0007\u0001\u0000\u0000\u00fe\u0103\u0003\u0012\t\u0003\u00ff\u0100\n"+ - "\u0001\u0000\u0000\u0100\u0101\u0007\u0000\u0000\u0000\u0101\u0103\u0003"+ - "\u0012\t\u0002\u0102\u00fc\u0001\u0000\u0000\u0000\u0102\u00ff\u0001\u0000"+ - "\u0000\u0000\u0103\u0106\u0001\u0000\u0000\u0000\u0104\u0102\u0001\u0000"+ - "\u0000\u0000\u0104\u0105\u0001\u0000\u0000\u0000\u0105\u0013\u0001\u0000"+ - 
"\u0000\u0000\u0106\u0104\u0001\u0000\u0000\u0000\u0107\u0108\u0006\n\uffff"+ - "\uffff\u0000\u0108\u0110\u0003@ \u0000\u0109\u0110\u00036\u001b\u0000"+ - "\u010a\u0110\u0003\u0016\u000b\u0000\u010b\u010c\u00050\u0000\u0000\u010c"+ - "\u010d\u0003\n\u0005\u0000\u010d\u010e\u00057\u0000\u0000\u010e\u0110"+ - "\u0001\u0000\u0000\u0000\u010f\u0107\u0001\u0000\u0000\u0000\u010f\u0109"+ - "\u0001\u0000\u0000\u0000\u010f\u010a\u0001\u0000\u0000\u0000\u010f\u010b"+ - "\u0001\u0000\u0000\u0000\u0110\u0116\u0001\u0000\u0000\u0000\u0111\u0112"+ - "\n\u0001\u0000\u0000\u0112\u0113\u0005%\u0000\u0000\u0113\u0115\u0003"+ - "\u001a\r\u0000\u0114\u0111\u0001\u0000\u0000\u0000\u0115\u0118\u0001\u0000"+ - "\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001\u0000"+ - "\u0000\u0000\u0117\u0015\u0001\u0000\u0000\u0000\u0118\u0116\u0001\u0000"+ - "\u0000\u0000\u0119\u011a\u0003\u0018\f\u0000\u011a\u0124\u00050\u0000"+ - "\u0000\u011b\u0125\u0005B\u0000\u0000\u011c\u0121\u0003\n\u0005\u0000"+ - "\u011d\u011e\u0005\'\u0000\u0000\u011e\u0120\u0003\n\u0005\u0000\u011f"+ - "\u011d\u0001\u0000\u0000\u0000\u0120\u0123\u0001\u0000\u0000\u0000\u0121"+ - "\u011f\u0001\u0000\u0000\u0000\u0121\u0122\u0001\u0000\u0000\u0000\u0122"+ - "\u0125\u0001\u0000\u0000\u0000\u0123\u0121\u0001\u0000\u0000\u0000\u0124"+ - "\u011b\u0001\u0000\u0000\u0000\u0124\u011c\u0001\u0000\u0000\u0000\u0124"+ - "\u0125\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126"+ - "\u0127\u00057\u0000\u0000\u0127\u0017\u0001\u0000\u0000\u0000\u0128\u0129"+ - "\u0003D\"\u0000\u0129\u0019\u0001\u0000\u0000\u0000\u012a\u012b\u0003"+ - "<\u001e\u0000\u012b\u001b\u0001\u0000\u0000\u0000\u012c\u012d\u0005\f"+ - "\u0000\u0000\u012d\u012e\u0003\u001e\u000f\u0000\u012e\u001d\u0001\u0000"+ - "\u0000\u0000\u012f\u0134\u0003 \u0010\u0000\u0130\u0131\u0005\'\u0000"+ - "\u0000\u0131\u0133\u0003 \u0010\u0000\u0132\u0130\u0001\u0000\u0000\u0000"+ - 
"\u0133\u0136\u0001\u0000\u0000\u0000\u0134\u0132\u0001\u0000\u0000\u0000"+ - "\u0134\u0135\u0001\u0000\u0000\u0000\u0135\u001f\u0001\u0000\u0000\u0000"+ - "\u0136\u0134\u0001\u0000\u0000\u0000\u0137\u0138\u00036\u001b\u0000\u0138"+ - "\u0139\u0005$\u0000\u0000\u0139\u013b\u0001\u0000\u0000\u0000\u013a\u0137"+ - "\u0001\u0000\u0000\u0000\u013a\u013b\u0001\u0000\u0000\u0000\u013b\u013c"+ - "\u0001\u0000\u0000\u0000\u013c\u013d\u0003\n\u0005\u0000\u013d!\u0001"+ - "\u0000\u0000\u0000\u013e\u013f\u0005\u0006\u0000\u0000\u013f\u0144\u0003"+ - "$\u0012\u0000\u0140\u0141\u0005\'\u0000\u0000\u0141\u0143\u0003$\u0012"+ - "\u0000\u0142\u0140\u0001\u0000\u0000\u0000\u0143\u0146\u0001\u0000\u0000"+ - "\u0000\u0144\u0142\u0001\u0000\u0000\u0000\u0144\u0145\u0001\u0000\u0000"+ - "\u0000\u0145\u0148\u0001\u0000\u0000\u0000\u0146\u0144\u0001\u0000\u0000"+ - "\u0000\u0147\u0149\u0003*\u0015\u0000\u0148\u0147\u0001\u0000\u0000\u0000"+ - "\u0148\u0149\u0001\u0000\u0000\u0000\u0149#\u0001\u0000\u0000\u0000\u014a"+ - "\u014b\u0003&\u0013\u0000\u014b\u014c\u0005&\u0000\u0000\u014c\u014e\u0001"+ - "\u0000\u0000\u0000\u014d\u014a\u0001\u0000\u0000\u0000\u014d\u014e\u0001"+ - "\u0000\u0000\u0000\u014e\u014f\u0001\u0000\u0000\u0000\u014f\u0150\u0003"+ - "(\u0014\u0000\u0150%\u0001\u0000\u0000\u0000\u0151\u0152\u0005Q\u0000"+ - "\u0000\u0152\'\u0001\u0000\u0000\u0000\u0153\u0154\u0007\u0002\u0000\u0000"+ - "\u0154)\u0001\u0000\u0000\u0000\u0155\u0156\u0005P\u0000\u0000\u0156\u015b"+ - "\u0005Q\u0000\u0000\u0157\u0158\u0005\'\u0000\u0000\u0158\u015a\u0005"+ - "Q\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u015a\u015d\u0001\u0000"+ - "\u0000\u0000\u015b\u0159\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000"+ - "\u0000\u0000\u015c+\u0001\u0000\u0000\u0000\u015d\u015b\u0001\u0000\u0000"+ - "\u0000\u015e\u015f\u0005\u0013\u0000\u0000\u015f\u0164\u0003$\u0012\u0000"+ - "\u0160\u0161\u0005\'\u0000\u0000\u0161\u0163\u0003$\u0012\u0000\u0162"+ - 
"\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ - "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ - "\u0168\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167"+ - "\u0169\u00032\u0019\u0000\u0168\u0167\u0001\u0000\u0000\u0000\u0168\u0169"+ - "\u0001\u0000\u0000\u0000\u0169\u016c\u0001\u0000\u0000\u0000\u016a\u016b"+ - "\u0005!\u0000\u0000\u016b\u016d\u0003\u001e\u000f\u0000\u016c\u016a\u0001"+ - "\u0000\u0000\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d-\u0001\u0000"+ - "\u0000\u0000\u016e\u016f\u0005\u0004\u0000\u0000\u016f\u0170\u0003\u001e"+ - "\u000f\u0000\u0170/\u0001\u0000\u0000\u0000\u0171\u0173\u0005\u000f\u0000"+ - "\u0000\u0172\u0174\u00032\u0019\u0000\u0173\u0172\u0001\u0000\u0000\u0000"+ - "\u0173\u0174\u0001\u0000\u0000\u0000\u0174\u0177\u0001\u0000\u0000\u0000"+ - "\u0175\u0176\u0005!\u0000\u0000\u0176\u0178\u0003\u001e\u000f\u0000\u0177"+ - "\u0175\u0001\u0000\u0000\u0000\u0177\u0178\u0001\u0000\u0000\u0000\u0178"+ - "1\u0001\u0000\u0000\u0000\u0179\u017e\u00034\u001a\u0000\u017a\u017b\u0005"+ - "\'\u0000\u0000\u017b\u017d\u00034\u001a\u0000\u017c\u017a\u0001\u0000"+ - "\u0000\u0000\u017d\u0180\u0001\u0000\u0000\u0000\u017e\u017c\u0001\u0000"+ - "\u0000\u0000\u017e\u017f\u0001\u0000\u0000\u0000\u017f3\u0001\u0000\u0000"+ - "\u0000\u0180\u017e\u0001\u0000\u0000\u0000\u0181\u0184\u0003 \u0010\u0000"+ - "\u0182\u0183\u0005\u0010\u0000\u0000\u0183\u0185\u0003\n\u0005\u0000\u0184"+ - "\u0182\u0001\u0000\u0000\u0000\u0184\u0185\u0001\u0000\u0000\u0000\u0185"+ - "5\u0001\u0000\u0000\u0000\u0186\u018b\u0003D\"\u0000\u0187\u0188\u0005"+ - ")\u0000\u0000\u0188\u018a\u0003D\"\u0000\u0189\u0187\u0001\u0000\u0000"+ - "\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u0189\u0001\u0000\u0000"+ - "\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c7\u0001\u0000\u0000\u0000"+ - "\u018d\u018b\u0001\u0000\u0000\u0000\u018e\u0193\u0003>\u001f\u0000\u018f"+ - 
"\u0190\u0005)\u0000\u0000\u0190\u0192\u0003>\u001f\u0000\u0191\u018f\u0001"+ - "\u0000\u0000\u0000\u0192\u0195\u0001\u0000\u0000\u0000\u0193\u0191\u0001"+ - "\u0000\u0000\u0000\u0193\u0194\u0001\u0000\u0000\u0000\u01949\u0001\u0000"+ - "\u0000\u0000\u0195\u0193\u0001\u0000\u0000\u0000\u0196\u019b\u00038\u001c"+ - "\u0000\u0197\u0198\u0005\'\u0000\u0000\u0198\u019a\u00038\u001c\u0000"+ - "\u0199\u0197\u0001\u0000\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000"+ - "\u019b\u0199\u0001\u0000\u0000\u0000\u019b\u019c\u0001\u0000\u0000\u0000"+ - "\u019c;\u0001\u0000\u0000\u0000\u019d\u019b\u0001\u0000\u0000\u0000\u019e"+ - "\u019f\u0007\u0003\u0000\u0000\u019f=\u0001\u0000\u0000\u0000\u01a0\u01a4"+ - "\u0005U\u0000\u0000\u01a1\u01a2\u0004\u001f\n\u0000\u01a2\u01a4\u0003"+ - "B!\u0000\u01a3\u01a0\u0001\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000"+ - "\u0000\u01a4?\u0001\u0000\u0000\u0000\u01a5\u01d0\u00052\u0000\u0000\u01a6"+ - "\u01a7\u0003d2\u0000\u01a7\u01a8\u0005H\u0000\u0000\u01a8\u01d0\u0001"+ - "\u0000\u0000\u0000\u01a9\u01d0\u0003b1\u0000\u01aa\u01d0\u0003d2\u0000"+ - "\u01ab\u01d0\u0003^/\u0000\u01ac\u01d0\u0003B!\u0000\u01ad\u01d0\u0003"+ - "f3\u0000\u01ae\u01af\u0005F\u0000\u0000\u01af\u01b4\u0003`0\u0000\u01b0"+ - "\u01b1\u0005\'\u0000\u0000\u01b1\u01b3\u0003`0\u0000\u01b2\u01b0\u0001"+ - "\u0000\u0000\u0000\u01b3\u01b6\u0001\u0000\u0000\u0000\u01b4\u01b2\u0001"+ - "\u0000\u0000\u0000\u01b4\u01b5\u0001\u0000\u0000\u0000\u01b5\u01b7\u0001"+ - "\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000\u0000\u01b7\u01b8\u0005"+ - "G\u0000\u0000\u01b8\u01d0\u0001\u0000\u0000\u0000\u01b9\u01ba\u0005F\u0000"+ - "\u0000\u01ba\u01bf\u0003^/\u0000\u01bb\u01bc\u0005\'\u0000\u0000\u01bc"+ - "\u01be\u0003^/\u0000\u01bd\u01bb\u0001\u0000\u0000\u0000\u01be\u01c1\u0001"+ - "\u0000\u0000\u0000\u01bf\u01bd\u0001\u0000\u0000\u0000\u01bf\u01c0\u0001"+ - "\u0000\u0000\u0000\u01c0\u01c2\u0001\u0000\u0000\u0000\u01c1\u01bf\u0001"+ - 
"\u0000\u0000\u0000\u01c2\u01c3\u0005G\u0000\u0000\u01c3\u01d0\u0001\u0000"+ - "\u0000\u0000\u01c4\u01c5\u0005F\u0000\u0000\u01c5\u01ca\u0003f3\u0000"+ - "\u01c6\u01c7\u0005\'\u0000\u0000\u01c7\u01c9\u0003f3\u0000\u01c8\u01c6"+ - "\u0001\u0000\u0000\u0000\u01c9\u01cc\u0001\u0000\u0000\u0000\u01ca\u01c8"+ - "\u0001\u0000\u0000\u0000\u01ca\u01cb\u0001\u0000\u0000\u0000\u01cb\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cd\u01ce"+ - "\u0005G\u0000\u0000\u01ce\u01d0\u0001\u0000\u0000\u0000\u01cf\u01a5\u0001"+ - "\u0000\u0000\u0000\u01cf\u01a6\u0001\u0000\u0000\u0000\u01cf\u01a9\u0001"+ - "\u0000\u0000\u0000\u01cf\u01aa\u0001\u0000\u0000\u0000\u01cf\u01ab\u0001"+ - "\u0000\u0000\u0000\u01cf\u01ac\u0001\u0000\u0000\u0000\u01cf\u01ad\u0001"+ - "\u0000\u0000\u0000\u01cf\u01ae\u0001\u0000\u0000\u0000\u01cf\u01b9\u0001"+ - "\u0000\u0000\u0000\u01cf\u01c4\u0001\u0000\u0000\u0000\u01d0A\u0001\u0000"+ - "\u0000\u0000\u01d1\u01d4\u00055\u0000\u0000\u01d2\u01d4\u0005E\u0000\u0000"+ - "\u01d3\u01d1\u0001\u0000\u0000\u0000\u01d3\u01d2\u0001\u0000\u0000\u0000"+ - "\u01d4C\u0001\u0000\u0000\u0000\u01d5\u01d9\u0003<\u001e\u0000\u01d6\u01d7"+ - "\u0004\"\u000b\u0000\u01d7\u01d9\u0003B!\u0000\u01d8\u01d5\u0001\u0000"+ - "\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000\u0000\u01d9E\u0001\u0000\u0000"+ - "\u0000\u01da\u01db\u0005\t\u0000\u0000\u01db\u01dc\u0005\u001f\u0000\u0000"+ - "\u01dcG\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u000e\u0000\u0000\u01de"+ - "\u01e3\u0003J%\u0000\u01df\u01e0\u0005\'\u0000\u0000\u01e0\u01e2\u0003"+ - "J%\u0000\u01e1\u01df\u0001\u0000\u0000\u0000\u01e2\u01e5\u0001\u0000\u0000"+ - "\u0000\u01e3\u01e1\u0001\u0000\u0000\u0000\u01e3\u01e4\u0001\u0000\u0000"+ - "\u0000\u01e4I\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001\u0000\u0000\u0000"+ - "\u01e6\u01e8\u0003\n\u0005\u0000\u01e7\u01e9\u0007\u0004\u0000\u0000\u01e8"+ - "\u01e7\u0001\u0000\u0000\u0000\u01e8\u01e9\u0001\u0000\u0000\u0000\u01e9"+ - 
"\u01ec\u0001\u0000\u0000\u0000\u01ea\u01eb\u00053\u0000\u0000\u01eb\u01ed"+ - "\u0007\u0005\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed"+ - "\u0001\u0000\u0000\u0000\u01edK\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005"+ - "\b\u0000\u0000\u01ef\u01f0\u0003:\u001d\u0000\u01f0M\u0001\u0000\u0000"+ - "\u0000\u01f1\u01f2\u0005\u0002\u0000\u0000\u01f2\u01f3\u0003:\u001d\u0000"+ - "\u01f3O\u0001\u0000\u0000\u0000\u01f4\u01f5\u0005\u000b\u0000\u0000\u01f5"+ - "\u01fa\u0003R)\u0000\u01f6\u01f7\u0005\'\u0000\u0000\u01f7\u01f9\u0003"+ - "R)\u0000\u01f8\u01f6\u0001\u0000\u0000\u0000\u01f9\u01fc\u0001\u0000\u0000"+ - "\u0000\u01fa\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000\u0000"+ - "\u0000\u01fbQ\u0001\u0000\u0000\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000"+ - "\u01fd\u01fe\u00038\u001c\u0000\u01fe\u01ff\u0005Y\u0000\u0000\u01ff\u0200"+ - "\u00038\u001c\u0000\u0200S\u0001\u0000\u0000\u0000\u0201\u0202\u0005\u0001"+ - "\u0000\u0000\u0202\u0203\u0003\u0014\n\u0000\u0203\u0205\u0003f3\u0000"+ - "\u0204\u0206\u0003Z-\u0000\u0205\u0204\u0001\u0000\u0000\u0000\u0205\u0206"+ - "\u0001\u0000\u0000\u0000\u0206U\u0001\u0000\u0000\u0000\u0207\u0208\u0005"+ - "\u0007\u0000\u0000\u0208\u0209\u0003\u0014\n\u0000\u0209\u020a\u0003f"+ - "3\u0000\u020aW\u0001\u0000\u0000\u0000\u020b\u020c\u0005\n\u0000\u0000"+ - "\u020c\u020d\u00036\u001b\u0000\u020dY\u0001\u0000\u0000\u0000\u020e\u0213"+ - "\u0003\\.\u0000\u020f\u0210\u0005\'\u0000\u0000\u0210\u0212\u0003\\.\u0000"+ - "\u0211\u020f\u0001\u0000\u0000\u0000\u0212\u0215\u0001\u0000\u0000\u0000"+ - "\u0213\u0211\u0001\u0000\u0000\u0000\u0213\u0214\u0001\u0000\u0000\u0000"+ - "\u0214[\u0001\u0000\u0000\u0000\u0215\u0213\u0001\u0000\u0000\u0000\u0216"+ - "\u0217\u0003<\u001e\u0000\u0217\u0218\u0005$\u0000\u0000\u0218\u0219\u0003"+ - "@ \u0000\u0219]\u0001\u0000\u0000\u0000\u021a\u021b\u0007\u0006\u0000"+ - "\u0000\u021b_\u0001\u0000\u0000\u0000\u021c\u021f\u0003b1\u0000\u021d"+ - 
"\u021f\u0003d2\u0000\u021e\u021c\u0001\u0000\u0000\u0000\u021e\u021d\u0001"+ - "\u0000\u0000\u0000\u021fa\u0001\u0000\u0000\u0000\u0220\u0222\u0007\u0000"+ - "\u0000\u0000\u0221\u0220\u0001\u0000\u0000\u0000\u0221\u0222\u0001\u0000"+ - "\u0000\u0000\u0222\u0223\u0001\u0000\u0000\u0000\u0223\u0224\u0005 \u0000"+ - "\u0000\u0224c\u0001\u0000\u0000\u0000\u0225\u0227\u0007\u0000\u0000\u0000"+ - "\u0226\u0225\u0001\u0000\u0000\u0000\u0226\u0227\u0001\u0000\u0000\u0000"+ - "\u0227\u0228\u0001\u0000\u0000\u0000\u0228\u0229\u0005\u001f\u0000\u0000"+ - "\u0229e\u0001\u0000\u0000\u0000\u022a\u022b\u0005\u001e\u0000\u0000\u022b"+ - "g\u0001\u0000\u0000\u0000\u022c\u022d\u0007\u0007\u0000\u0000\u022di\u0001"+ - "\u0000\u0000\u0000\u022e\u022f\u0005\u0005\u0000\u0000\u022f\u0230\u0003"+ - "l6\u0000\u0230k\u0001\u0000\u0000\u0000\u0231\u0232\u0005F\u0000\u0000"+ - "\u0232\u0233\u0003\u0002\u0001\u0000\u0233\u0234\u0005G\u0000\u0000\u0234"+ - "m\u0001\u0000\u0000\u0000\u0235\u0236\u0005\r\u0000\u0000\u0236\u0237"+ - "\u0005i\u0000\u0000\u0237o\u0001\u0000\u0000\u0000\u0238\u0239\u0005\u0003"+ - "\u0000\u0000\u0239\u023c\u0005_\u0000\u0000\u023a\u023b\u0005]\u0000\u0000"+ - "\u023b\u023d\u00038\u001c\u0000\u023c\u023a\u0001\u0000\u0000\u0000\u023c"+ - "\u023d\u0001\u0000\u0000\u0000\u023d\u0247\u0001\u0000\u0000\u0000\u023e"+ - "\u023f\u0005^\u0000\u0000\u023f\u0244\u0003r9\u0000\u0240\u0241\u0005"+ - "\'\u0000\u0000\u0241\u0243\u0003r9\u0000\u0242\u0240\u0001\u0000\u0000"+ - "\u0000\u0243\u0246\u0001\u0000\u0000\u0000\u0244\u0242\u0001\u0000\u0000"+ - "\u0000\u0244\u0245\u0001\u0000\u0000\u0000\u0245\u0248\u0001\u0000\u0000"+ - "\u0000\u0246\u0244\u0001\u0000\u0000\u0000\u0247\u023e\u0001\u0000\u0000"+ - "\u0000\u0247\u0248\u0001\u0000\u0000\u0000\u0248q\u0001\u0000\u0000\u0000"+ - "\u0249\u024a\u00038\u001c\u0000\u024a\u024b\u0005$\u0000\u0000\u024b\u024d"+ - "\u0001\u0000\u0000\u0000\u024c\u0249\u0001\u0000\u0000\u0000\u024c\u024d"+ - 
"\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e\u024f"+ - "\u00038\u001c\u0000\u024fs\u0001\u0000\u0000\u0000\u0250\u0251\u0005\u0012"+ - "\u0000\u0000\u0251\u0252\u0003$\u0012\u0000\u0252\u0253\u0005]\u0000\u0000"+ - "\u0253\u0254\u0003:\u001d\u0000\u0254u\u0001\u0000\u0000\u0000\u0255\u0256"+ - "\u0005\u0011\u0000\u0000\u0256\u0259\u00032\u0019\u0000\u0257\u0258\u0005"+ - "!\u0000\u0000\u0258\u025a\u0003\u001e\u000f\u0000\u0259\u0257\u0001\u0000"+ - "\u0000\u0000\u0259\u025a\u0001\u0000\u0000\u0000\u025aw\u0001\u0000\u0000"+ - "\u0000\u025b\u025d\u0007\b\u0000\u0000\u025c\u025b\u0001\u0000\u0000\u0000"+ - "\u025c\u025d\u0001\u0000\u0000\u0000\u025d\u025e\u0001\u0000\u0000\u0000"+ - "\u025e\u025f\u0005\u0014\u0000\u0000\u025f\u0260\u0003z=\u0000\u0260\u0261"+ - "\u0003|>\u0000\u0261y\u0001\u0000\u0000\u0000\u0262\u0265\u0003<\u001e"+ - "\u0000\u0263\u0264\u0005Y\u0000\u0000\u0264\u0266\u0003<\u001e\u0000\u0265"+ - "\u0263\u0001\u0000\u0000\u0000\u0265\u0266\u0001\u0000\u0000\u0000\u0266"+ - "{\u0001\u0000\u0000\u0000\u0267\u0268\u0005]\u0000\u0000\u0268\u026d\u0003"+ - "~?\u0000\u0269\u026a\u0005\'\u0000\u0000\u026a\u026c\u0003~?\u0000\u026b"+ - "\u0269\u0001\u0000\u0000\u0000\u026c\u026f\u0001\u0000\u0000\u0000\u026d"+ - "\u026b\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000\u0000\u0000\u026e"+ - "}\u0001\u0000\u0000\u0000\u026f\u026d\u0001\u0000\u0000\u0000\u0270\u0271"+ - "\u0003\u0010\b\u0000\u0271\u007f\u0001\u0000\u0000\u0000=\u008b\u0094"+ - "\u00a8\u00b4\u00bd\u00c5\u00ca\u00d2\u00d4\u00d9\u00e0\u00e5\u00ea\u00f4"+ - "\u00fa\u0102\u0104\u010f\u0116\u0121\u0124\u0134\u013a\u0144\u0148\u014d"+ - "\u015b\u0164\u0168\u016c\u0173\u0177\u017e\u0184\u018b\u0193\u019b\u01a3"+ - "\u01b4\u01bf\u01ca\u01cf\u01d3\u01d8\u01e3\u01e8\u01ec\u01fa\u0205\u0213"+ - "\u021e\u0221\u0226\u023c\u0244\u0247\u024c\u0259\u025c\u0265\u026d"; + ";\u0002<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007"+ + 
"@\u0002A\u0007A\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001\u008e"+ + "\b\u0001\n\u0001\f\u0001\u0091\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ + "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002\u0099\b\u0002\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003"+ + "\u00ad\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005"+ + "\u00b9\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0005\u0005\u00c0\b\u0005\n\u0005\f\u0005\u00c3\t\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00ca\b\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00cf\b\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00d7"+ + "\b\u0005\n\u0005\f\u0005\u00da\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00de\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ + "\u0003\u0006\u00e5\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006"+ + "\u00ea\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007\u00ef\b"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001"+ + "\b\u0001\b\u0003\b\u00f9\b\b\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00ff"+ + "\b\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0005\t\u0107\b\t"+ + "\n\t\f\t\u010a\t\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0003\n\u0114\b\n\u0001\n\u0001\n\u0001\n\u0005\n\u0119\b\n"+ + "\n\n\f\n\u011c\t\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001"+ + "\u000b\u0001\u000b\u0005\u000b\u0124\b\u000b\n\u000b\f\u000b\u0127\t\u000b"+ + 
"\u0001\u000b\u0001\u000b\u0003\u000b\u012b\b\u000b\u0003\u000b\u012d\b"+ + "\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\r\u0001\r\u0001\r"+ + "\u0001\r\u0001\r\u0005\r\u0138\b\r\n\r\f\r\u013b\t\r\u0001\r\u0001\r\u0001"+ + "\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f\u0001"+ + "\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0005"+ + "\u0011\u014b\b\u0011\n\u0011\f\u0011\u014e\t\u0011\u0001\u0012\u0001\u0012"+ + "\u0001\u0012\u0003\u0012\u0153\b\u0012\u0001\u0012\u0001\u0012\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u015b\b\u0013\n\u0013"+ + "\f\u0013\u015e\t\u0013\u0001\u0013\u0003\u0013\u0161\b\u0013\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0003\u0014\u0166\b\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017"+ + "\u0001\u0017\u0001\u0017\u0005\u0017\u0172\b\u0017\n\u0017\f\u0017\u0175"+ + "\t\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0005\u0018\u017b"+ + "\b\u0018\n\u0018\f\u0018\u017e\t\u0018\u0001\u0018\u0003\u0018\u0181\b"+ + "\u0018\u0001\u0018\u0001\u0018\u0003\u0018\u0185\b\u0018\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0003\u001a\u018c\b\u001a\u0001"+ + "\u001a\u0001\u001a\u0003\u001a\u0190\b\u001a\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0005\u001b\u0195\b\u001b\n\u001b\f\u001b\u0198\t\u001b\u0001\u001c"+ + "\u0001\u001c\u0001\u001c\u0003\u001c\u019d\b\u001c\u0001\u001d\u0001\u001d"+ + "\u0001\u001d\u0005\u001d\u01a2\b\u001d\n\u001d\f\u001d\u01a5\t\u001d\u0001"+ + "\u001e\u0001\u001e\u0001\u001e\u0005\u001e\u01aa\b\u001e\n\u001e\f\u001e"+ + "\u01ad\t\u001e\u0001\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01b2\b"+ + "\u001f\n\u001f\f\u001f\u01b5\t\u001f\u0001 \u0001 \u0001!\u0001!\u0001"+ + "!\u0003!\u01bc\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001"+ + "\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005\"\u01cb\b\"\n"+ + 
"\"\f\"\u01ce\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\"\u0005"+ + "\"\u01d6\b\"\n\"\f\"\u01d9\t\"\u0001\"\u0001\"\u0001\"\u0001\"\u0001\""+ + "\u0001\"\u0005\"\u01e1\b\"\n\"\f\"\u01e4\t\"\u0001\"\u0001\"\u0003\"\u01e8"+ + "\b\"\u0001#\u0001#\u0003#\u01ec\b#\u0001$\u0001$\u0001$\u0003$\u01f1\b"+ + "$\u0001%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0005&\u01fa\b&\n&"+ + "\f&\u01fd\t&\u0001\'\u0001\'\u0003\'\u0201\b\'\u0001\'\u0001\'\u0003\'"+ + "\u0205\b\'\u0001(\u0001(\u0001(\u0001)\u0001)\u0001)\u0001*\u0001*\u0001"+ + "*\u0001*\u0005*\u0211\b*\n*\f*\u0214\t*\u0001+\u0001+\u0001+\u0001+\u0001"+ + ",\u0001,\u0001,\u0001,\u0003,\u021e\b,\u0001-\u0001-\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001/\u0001/\u0001/\u0005/\u022a\b/\n/\f/\u022d\t/\u0001"+ + "0\u00010\u00010\u00010\u00011\u00011\u00012\u00012\u00032\u0237\b2\u0001"+ + "3\u00033\u023a\b3\u00013\u00013\u00014\u00034\u023f\b4\u00014\u00014\u0001"+ + "5\u00015\u00016\u00016\u00017\u00017\u00017\u00018\u00018\u00018\u0001"+ + "8\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001:\u0003:\u0255\b:\u0001"+ + ":\u0001:\u0001:\u0001:\u0005:\u025b\b:\n:\f:\u025e\t:\u0003:\u0260\b:"+ + "\u0001;\u0001;\u0001;\u0003;\u0265\b;\u0001;\u0001;\u0001<\u0001<\u0001"+ + "<\u0001<\u0001<\u0001=\u0001=\u0001=\u0001=\u0003=\u0272\b=\u0001>\u0003"+ + ">\u0275\b>\u0001>\u0001>\u0001>\u0001>\u0001?\u0001?\u0001?\u0003?\u027e"+ + "\b?\u0001@\u0001@\u0001@\u0001@\u0005@\u0284\b@\n@\f@\u0287\t@\u0001A"+ + "\u0001A\u0001A\u0000\u0004\u0002\n\u0012\u0014B\u0000\u0002\u0004\u0006"+ + "\b\n\f\u000e\u0010\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,."+ + "02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\u0080\u0082\u0000\t\u0001\u0000"+ + "@A\u0001\u0000BD\u0002\u0000\u001e\u001eSS\u0001\u0000JK\u0002\u0000#"+ + "#((\u0002\u0000++..\u0002\u0000**88\u0002\u000099;?\u0001\u0000\u0016"+ + "\u0018\u02a6\u0000\u0084\u0001\u0000\u0000\u0000\u0002\u0087\u0001\u0000"+ + 
"\u0000\u0000\u0004\u0098\u0001\u0000\u0000\u0000\u0006\u00ac\u0001\u0000"+ + "\u0000\u0000\b\u00ae\u0001\u0000\u0000\u0000\n\u00ce\u0001\u0000\u0000"+ + "\u0000\f\u00e9\u0001\u0000\u0000\u0000\u000e\u00eb\u0001\u0000\u0000\u0000"+ + "\u0010\u00f8\u0001\u0000\u0000\u0000\u0012\u00fe\u0001\u0000\u0000\u0000"+ + "\u0014\u0113\u0001\u0000\u0000\u0000\u0016\u011d\u0001\u0000\u0000\u0000"+ + "\u0018\u0130\u0001\u0000\u0000\u0000\u001a\u0132\u0001\u0000\u0000\u0000"+ + "\u001c\u013e\u0001\u0000\u0000\u0000\u001e\u0142\u0001\u0000\u0000\u0000"+ + " \u0144\u0001\u0000\u0000\u0000\"\u0147\u0001\u0000\u0000\u0000$\u0152"+ + "\u0001\u0000\u0000\u0000&\u0156\u0001\u0000\u0000\u0000(\u0165\u0001\u0000"+ + "\u0000\u0000*\u0169\u0001\u0000\u0000\u0000,\u016b\u0001\u0000\u0000\u0000"+ + ".\u016d\u0001\u0000\u0000\u00000\u0176\u0001\u0000\u0000\u00002\u0186"+ + "\u0001\u0000\u0000\u00004\u0189\u0001\u0000\u0000\u00006\u0191\u0001\u0000"+ + "\u0000\u00008\u0199\u0001\u0000\u0000\u0000:\u019e\u0001\u0000\u0000\u0000"+ + "<\u01a6\u0001\u0000\u0000\u0000>\u01ae\u0001\u0000\u0000\u0000@\u01b6"+ + "\u0001\u0000\u0000\u0000B\u01bb\u0001\u0000\u0000\u0000D\u01e7\u0001\u0000"+ + "\u0000\u0000F\u01eb\u0001\u0000\u0000\u0000H\u01f0\u0001\u0000\u0000\u0000"+ + "J\u01f2\u0001\u0000\u0000\u0000L\u01f5\u0001\u0000\u0000\u0000N\u01fe"+ + "\u0001\u0000\u0000\u0000P\u0206\u0001\u0000\u0000\u0000R\u0209\u0001\u0000"+ + "\u0000\u0000T\u020c\u0001\u0000\u0000\u0000V\u0215\u0001\u0000\u0000\u0000"+ + "X\u0219\u0001\u0000\u0000\u0000Z\u021f\u0001\u0000\u0000\u0000\\\u0223"+ + "\u0001\u0000\u0000\u0000^\u0226\u0001\u0000\u0000\u0000`\u022e\u0001\u0000"+ + "\u0000\u0000b\u0232\u0001\u0000\u0000\u0000d\u0236\u0001\u0000\u0000\u0000"+ + "f\u0239\u0001\u0000\u0000\u0000h\u023e\u0001\u0000\u0000\u0000j\u0242"+ + "\u0001\u0000\u0000\u0000l\u0244\u0001\u0000\u0000\u0000n\u0246\u0001\u0000"+ + "\u0000\u0000p\u0249\u0001\u0000\u0000\u0000r\u024d\u0001\u0000\u0000\u0000"+ + 
"t\u0250\u0001\u0000\u0000\u0000v\u0264\u0001\u0000\u0000\u0000x\u0268"+ + "\u0001\u0000\u0000\u0000z\u026d\u0001\u0000\u0000\u0000|\u0274\u0001\u0000"+ + "\u0000\u0000~\u027a\u0001\u0000\u0000\u0000\u0080\u027f\u0001\u0000\u0000"+ + "\u0000\u0082\u0288\u0001\u0000\u0000\u0000\u0084\u0085\u0003\u0002\u0001"+ + "\u0000\u0085\u0086\u0005\u0000\u0000\u0001\u0086\u0001\u0001\u0000\u0000"+ + "\u0000\u0087\u0088\u0006\u0001\uffff\uffff\u0000\u0088\u0089\u0003\u0004"+ + "\u0002\u0000\u0089\u008f\u0001\u0000\u0000\u0000\u008a\u008b\n\u0001\u0000"+ + "\u0000\u008b\u008c\u0005\u001d\u0000\u0000\u008c\u008e\u0003\u0006\u0003"+ + "\u0000\u008d\u008a\u0001\u0000\u0000\u0000\u008e\u0091\u0001\u0000\u0000"+ + "\u0000\u008f\u008d\u0001\u0000\u0000\u0000\u008f\u0090\u0001\u0000\u0000"+ + "\u0000\u0090\u0003\u0001\u0000\u0000\u0000\u0091\u008f\u0001\u0000\u0000"+ + "\u0000\u0092\u0099\u0003n7\u0000\u0093\u0099\u0003&\u0013\u0000\u0094"+ + "\u0099\u0003 \u0010\u0000\u0095\u0099\u0003r9\u0000\u0096\u0097\u0004"+ + "\u0002\u0001\u0000\u0097\u0099\u00030\u0018\u0000\u0098\u0092\u0001\u0000"+ + "\u0000\u0000\u0098\u0093\u0001\u0000\u0000\u0000\u0098\u0094\u0001\u0000"+ + "\u0000\u0000\u0098\u0095\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000"+ + "\u0000\u0000\u0099\u0005\u0001\u0000\u0000\u0000\u009a\u00ad\u00032\u0019"+ + "\u0000\u009b\u00ad\u0003\b\u0004\u0000\u009c\u00ad\u0003P(\u0000\u009d"+ + "\u00ad\u0003J%\u0000\u009e\u00ad\u00034\u001a\u0000\u009f\u00ad\u0003"+ + "L&\u0000\u00a0\u00ad\u0003R)\u0000\u00a1\u00ad\u0003T*\u0000\u00a2\u00ad"+ + "\u0003X,\u0000\u00a3\u00ad\u0003Z-\u0000\u00a4\u00ad\u0003t:\u0000\u00a5"+ + "\u00ad\u0003\\.\u0000\u00a6\u00a7\u0004\u0003\u0002\u0000\u00a7\u00ad"+ + "\u0003z=\u0000\u00a8\u00a9\u0004\u0003\u0003\u0000\u00a9\u00ad\u0003x"+ + "<\u0000\u00aa\u00ab\u0004\u0003\u0004\u0000\u00ab\u00ad\u0003|>\u0000"+ + "\u00ac\u009a\u0001\u0000\u0000\u0000\u00ac\u009b\u0001\u0000\u0000\u0000"+ + 
"\u00ac\u009c\u0001\u0000\u0000\u0000\u00ac\u009d\u0001\u0000\u0000\u0000"+ + "\u00ac\u009e\u0001\u0000\u0000\u0000\u00ac\u009f\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a0\u0001\u0000\u0000\u0000\u00ac\u00a1\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a2\u0001\u0000\u0000\u0000\u00ac\u00a3\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a4\u0001\u0000\u0000\u0000\u00ac\u00a5\u0001\u0000\u0000\u0000"+ + "\u00ac\u00a6\u0001\u0000\u0000\u0000\u00ac\u00a8\u0001\u0000\u0000\u0000"+ + "\u00ac\u00aa\u0001\u0000\u0000\u0000\u00ad\u0007\u0001\u0000\u0000\u0000"+ + "\u00ae\u00af\u0005\u0010\u0000\u0000\u00af\u00b0\u0003\n\u0005\u0000\u00b0"+ + "\t\u0001\u0000\u0000\u0000\u00b1\u00b2\u0006\u0005\uffff\uffff\u0000\u00b2"+ + "\u00b3\u00051\u0000\u0000\u00b3\u00cf\u0003\n\u0005\b\u00b4\u00cf\u0003"+ + "\u0010\b\u0000\u00b5\u00cf\u0003\f\u0006\u0000\u00b6\u00b8\u0003\u0010"+ + "\b\u0000\u00b7\u00b9\u00051\u0000\u0000\u00b8\u00b7\u0001\u0000\u0000"+ + "\u0000\u00b8\u00b9\u0001\u0000\u0000\u0000\u00b9\u00ba\u0001\u0000\u0000"+ + "\u0000\u00ba\u00bb\u0005,\u0000\u0000\u00bb\u00bc\u00050\u0000\u0000\u00bc"+ + "\u00c1\u0003\u0010\b\u0000\u00bd\u00be\u0005\'\u0000\u0000\u00be\u00c0"+ + "\u0003\u0010\b\u0000\u00bf\u00bd\u0001\u0000\u0000\u0000\u00c0\u00c3\u0001"+ + "\u0000\u0000\u0000\u00c1\u00bf\u0001\u0000\u0000\u0000\u00c1\u00c2\u0001"+ + "\u0000\u0000\u0000\u00c2\u00c4\u0001\u0000\u0000\u0000\u00c3\u00c1\u0001"+ + "\u0000\u0000\u0000\u00c4\u00c5\u00057\u0000\u0000\u00c5\u00cf\u0001\u0000"+ + "\u0000\u0000\u00c6\u00c7\u0003\u0010\b\u0000\u00c7\u00c9\u0005-\u0000"+ + "\u0000\u00c8\u00ca\u00051\u0000\u0000\u00c9\u00c8\u0001\u0000\u0000\u0000"+ + "\u00c9\u00ca\u0001\u0000\u0000\u0000\u00ca\u00cb\u0001\u0000\u0000\u0000"+ + "\u00cb\u00cc\u00052\u0000\u0000\u00cc\u00cf\u0001\u0000\u0000\u0000\u00cd"+ + "\u00cf\u0003\u000e\u0007\u0000\u00ce\u00b1\u0001\u0000\u0000\u0000\u00ce"+ + "\u00b4\u0001\u0000\u0000\u0000\u00ce\u00b5\u0001\u0000\u0000\u0000\u00ce"+ + 
"\u00b6\u0001\u0000\u0000\u0000\u00ce\u00c6\u0001\u0000\u0000\u0000\u00ce"+ + "\u00cd\u0001\u0000\u0000\u0000\u00cf\u00d8\u0001\u0000\u0000\u0000\u00d0"+ + "\u00d1\n\u0005\u0000\u0000\u00d1\u00d2\u0005\"\u0000\u0000\u00d2\u00d7"+ + "\u0003\n\u0005\u0006\u00d3\u00d4\n\u0004\u0000\u0000\u00d4\u00d5\u0005"+ + "4\u0000\u0000\u00d5\u00d7\u0003\n\u0005\u0005\u00d6\u00d0\u0001\u0000"+ + "\u0000\u0000\u00d6\u00d3\u0001\u0000\u0000\u0000\u00d7\u00da\u0001\u0000"+ + "\u0000\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d8\u00d9\u0001\u0000"+ + "\u0000\u0000\u00d9\u000b\u0001\u0000\u0000\u0000\u00da\u00d8\u0001\u0000"+ + "\u0000\u0000\u00db\u00dd\u0003\u0010\b\u0000\u00dc\u00de\u00051\u0000"+ + "\u0000\u00dd\u00dc\u0001\u0000\u0000\u0000\u00dd\u00de\u0001\u0000\u0000"+ + "\u0000\u00de\u00df\u0001\u0000\u0000\u0000\u00df\u00e0\u0005/\u0000\u0000"+ + "\u00e0\u00e1\u0003j5\u0000\u00e1\u00ea\u0001\u0000\u0000\u0000\u00e2\u00e4"+ + "\u0003\u0010\b\u0000\u00e3\u00e5\u00051\u0000\u0000\u00e4\u00e3\u0001"+ + "\u0000\u0000\u0000\u00e4\u00e5\u0001\u0000\u0000\u0000\u00e5\u00e6\u0001"+ + "\u0000\u0000\u0000\u00e6\u00e7\u00056\u0000\u0000\u00e7\u00e8\u0003j5"+ + "\u0000\u00e8\u00ea\u0001\u0000\u0000\u0000\u00e9\u00db\u0001\u0000\u0000"+ + "\u0000\u00e9\u00e2\u0001\u0000\u0000\u0000\u00ea\r\u0001\u0000\u0000\u0000"+ + "\u00eb\u00ee\u0003:\u001d\u0000\u00ec\u00ed\u0005%\u0000\u0000\u00ed\u00ef"+ + "\u0003\u001e\u000f\u0000\u00ee\u00ec\u0001\u0000\u0000\u0000\u00ee\u00ef"+ + "\u0001\u0000\u0000\u0000\u00ef\u00f0\u0001\u0000\u0000\u0000\u00f0\u00f1"+ + "\u0005&\u0000\u0000\u00f1\u00f2\u0003D\"\u0000\u00f2\u000f\u0001\u0000"+ + "\u0000\u0000\u00f3\u00f9\u0003\u0012\t\u0000\u00f4\u00f5\u0003\u0012\t"+ + "\u0000\u00f5\u00f6\u0003l6\u0000\u00f6\u00f7\u0003\u0012\t\u0000\u00f7"+ + "\u00f9\u0001\u0000\u0000\u0000\u00f8\u00f3\u0001\u0000\u0000\u0000\u00f8"+ + "\u00f4\u0001\u0000\u0000\u0000\u00f9\u0011\u0001\u0000\u0000\u0000\u00fa"+ + 
"\u00fb\u0006\t\uffff\uffff\u0000\u00fb\u00ff\u0003\u0014\n\u0000\u00fc"+ + "\u00fd\u0007\u0000\u0000\u0000\u00fd\u00ff\u0003\u0012\t\u0003\u00fe\u00fa"+ + "\u0001\u0000\u0000\u0000\u00fe\u00fc\u0001\u0000\u0000\u0000\u00ff\u0108"+ + "\u0001\u0000\u0000\u0000\u0100\u0101\n\u0002\u0000\u0000\u0101\u0102\u0007"+ + "\u0001\u0000\u0000\u0102\u0107\u0003\u0012\t\u0003\u0103\u0104\n\u0001"+ + "\u0000\u0000\u0104\u0105\u0007\u0000\u0000\u0000\u0105\u0107\u0003\u0012"+ + "\t\u0002\u0106\u0100\u0001\u0000\u0000\u0000\u0106\u0103\u0001\u0000\u0000"+ + "\u0000\u0107\u010a\u0001\u0000\u0000\u0000\u0108\u0106\u0001\u0000\u0000"+ + "\u0000\u0108\u0109\u0001\u0000\u0000\u0000\u0109\u0013\u0001\u0000\u0000"+ + "\u0000\u010a\u0108\u0001\u0000\u0000\u0000\u010b\u010c\u0006\n\uffff\uffff"+ + "\u0000\u010c\u0114\u0003D\"\u0000\u010d\u0114\u0003:\u001d\u0000\u010e"+ + "\u0114\u0003\u0016\u000b\u0000\u010f\u0110\u00050\u0000\u0000\u0110\u0111"+ + "\u0003\n\u0005\u0000\u0111\u0112\u00057\u0000\u0000\u0112\u0114\u0001"+ + "\u0000\u0000\u0000\u0113\u010b\u0001\u0000\u0000\u0000\u0113\u010d\u0001"+ + "\u0000\u0000\u0000\u0113\u010e\u0001\u0000\u0000\u0000\u0113\u010f\u0001"+ + "\u0000\u0000\u0000\u0114\u011a\u0001\u0000\u0000\u0000\u0115\u0116\n\u0001"+ + "\u0000\u0000\u0116\u0117\u0005%\u0000\u0000\u0117\u0119\u0003\u001e\u000f"+ + "\u0000\u0118\u0115\u0001\u0000\u0000\u0000\u0119\u011c\u0001\u0000\u0000"+ + "\u0000\u011a\u0118\u0001\u0000\u0000\u0000\u011a\u011b\u0001\u0000\u0000"+ + "\u0000\u011b\u0015\u0001\u0000\u0000\u0000\u011c\u011a\u0001\u0000\u0000"+ + "\u0000\u011d\u011e\u0003\u0018\f\u0000\u011e\u012c\u00050\u0000\u0000"+ + "\u011f\u012d\u0005B\u0000\u0000\u0120\u0125\u0003\n\u0005\u0000\u0121"+ + "\u0122\u0005\'\u0000\u0000\u0122\u0124\u0003\n\u0005\u0000\u0123\u0121"+ + "\u0001\u0000\u0000\u0000\u0124\u0127\u0001\u0000\u0000\u0000\u0125\u0123"+ + "\u0001\u0000\u0000\u0000\u0125\u0126\u0001\u0000\u0000\u0000\u0126\u012a"+ + 
"\u0001\u0000\u0000\u0000\u0127\u0125\u0001\u0000\u0000\u0000\u0128\u0129"+ + "\u0005\'\u0000\u0000\u0129\u012b\u0003\u001a\r\u0000\u012a\u0128\u0001"+ + "\u0000\u0000\u0000\u012a\u012b\u0001\u0000\u0000\u0000\u012b\u012d\u0001"+ + "\u0000\u0000\u0000\u012c\u011f\u0001\u0000\u0000\u0000\u012c\u0120\u0001"+ + "\u0000\u0000\u0000\u012c\u012d\u0001\u0000\u0000\u0000\u012d\u012e\u0001"+ + "\u0000\u0000\u0000\u012e\u012f\u00057\u0000\u0000\u012f\u0017\u0001\u0000"+ + "\u0000\u0000\u0130\u0131\u0003H$\u0000\u0131\u0019\u0001\u0000\u0000\u0000"+ + "\u0132\u0133\u0004\r\n\u0000\u0133\u0134\u0005E\u0000\u0000\u0134\u0139"+ + "\u0003\u001c\u000e\u0000\u0135\u0136\u0005\'\u0000\u0000\u0136\u0138\u0003"+ + "\u001c\u000e\u0000\u0137\u0135\u0001\u0000\u0000\u0000\u0138\u013b\u0001"+ + "\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u0139\u013a\u0001"+ + "\u0000\u0000\u0000\u013a\u013c\u0001\u0000\u0000\u0000\u013b\u0139\u0001"+ + "\u0000\u0000\u0000\u013c\u013d\u0005F\u0000\u0000\u013d\u001b\u0001\u0000"+ + "\u0000\u0000\u013e\u013f\u0003j5\u0000\u013f\u0140\u0005&\u0000\u0000"+ + "\u0140\u0141\u0003D\"\u0000\u0141\u001d\u0001\u0000\u0000\u0000\u0142"+ + "\u0143\u0003@ \u0000\u0143\u001f\u0001\u0000\u0000\u0000\u0144\u0145\u0005"+ + "\f\u0000\u0000\u0145\u0146\u0003\"\u0011\u0000\u0146!\u0001\u0000\u0000"+ + "\u0000\u0147\u014c\u0003$\u0012\u0000\u0148\u0149\u0005\'\u0000\u0000"+ + "\u0149\u014b\u0003$\u0012\u0000\u014a\u0148\u0001\u0000\u0000\u0000\u014b"+ + "\u014e\u0001\u0000\u0000\u0000\u014c\u014a\u0001\u0000\u0000\u0000\u014c"+ + "\u014d\u0001\u0000\u0000\u0000\u014d#\u0001\u0000\u0000\u0000\u014e\u014c"+ + "\u0001\u0000\u0000\u0000\u014f\u0150\u0003:\u001d\u0000\u0150\u0151\u0005"+ + "$\u0000\u0000\u0151\u0153\u0001\u0000\u0000\u0000\u0152\u014f\u0001\u0000"+ + "\u0000\u0000\u0152\u0153\u0001\u0000\u0000\u0000\u0153\u0154\u0001\u0000"+ + "\u0000\u0000\u0154\u0155\u0003\n\u0005\u0000\u0155%\u0001\u0000\u0000"+ + 
"\u0000\u0156\u0157\u0005\u0006\u0000\u0000\u0157\u015c\u0003(\u0014\u0000"+ + "\u0158\u0159\u0005\'\u0000\u0000\u0159\u015b\u0003(\u0014\u0000\u015a"+ + "\u0158\u0001\u0000\u0000\u0000\u015b\u015e\u0001\u0000\u0000\u0000\u015c"+ + "\u015a\u0001\u0000\u0000\u0000\u015c\u015d\u0001\u0000\u0000\u0000\u015d"+ + "\u0160\u0001\u0000\u0000\u0000\u015e\u015c\u0001\u0000\u0000\u0000\u015f"+ + "\u0161\u0003.\u0017\u0000\u0160\u015f\u0001\u0000\u0000\u0000\u0160\u0161"+ + "\u0001\u0000\u0000\u0000\u0161\'\u0001\u0000\u0000\u0000\u0162\u0163\u0003"+ + "*\u0015\u0000\u0163\u0164\u0005&\u0000\u0000\u0164\u0166\u0001\u0000\u0000"+ + "\u0000\u0165\u0162\u0001\u0000\u0000\u0000\u0165\u0166\u0001\u0000\u0000"+ + "\u0000\u0166\u0167\u0001\u0000\u0000\u0000\u0167\u0168\u0003,\u0016\u0000"+ + "\u0168)\u0001\u0000\u0000\u0000\u0169\u016a\u0005S\u0000\u0000\u016a+"+ + "\u0001\u0000\u0000\u0000\u016b\u016c\u0007\u0002\u0000\u0000\u016c-\u0001"+ + "\u0000\u0000\u0000\u016d\u016e\u0005R\u0000\u0000\u016e\u0173\u0005S\u0000"+ + "\u0000\u016f\u0170\u0005\'\u0000\u0000\u0170\u0172\u0005S\u0000\u0000"+ + "\u0171\u016f\u0001\u0000\u0000\u0000\u0172\u0175\u0001\u0000\u0000\u0000"+ + "\u0173\u0171\u0001\u0000\u0000\u0000\u0173\u0174\u0001\u0000\u0000\u0000"+ + "\u0174/\u0001\u0000\u0000\u0000\u0175\u0173\u0001\u0000\u0000\u0000\u0176"+ + "\u0177\u0005\u0013\u0000\u0000\u0177\u017c\u0003(\u0014\u0000\u0178\u0179"+ + "\u0005\'\u0000\u0000\u0179\u017b\u0003(\u0014\u0000\u017a\u0178\u0001"+ + "\u0000\u0000\u0000\u017b\u017e\u0001\u0000\u0000\u0000\u017c\u017a\u0001"+ + "\u0000\u0000\u0000\u017c\u017d\u0001\u0000\u0000\u0000\u017d\u0180\u0001"+ + "\u0000\u0000\u0000\u017e\u017c\u0001\u0000\u0000\u0000\u017f\u0181\u0003"+ + "6\u001b\u0000\u0180\u017f\u0001\u0000\u0000\u0000\u0180\u0181\u0001\u0000"+ + "\u0000\u0000\u0181\u0184\u0001\u0000\u0000\u0000\u0182\u0183\u0005!\u0000"+ + "\u0000\u0183\u0185\u0003\"\u0011\u0000\u0184\u0182\u0001\u0000\u0000\u0000"+ + 
"\u0184\u0185\u0001\u0000\u0000\u0000\u01851\u0001\u0000\u0000\u0000\u0186"+ + "\u0187\u0005\u0004\u0000\u0000\u0187\u0188\u0003\"\u0011\u0000\u01883"+ + "\u0001\u0000\u0000\u0000\u0189\u018b\u0005\u000f\u0000\u0000\u018a\u018c"+ + "\u00036\u001b\u0000\u018b\u018a\u0001\u0000\u0000\u0000\u018b\u018c\u0001"+ + "\u0000\u0000\u0000\u018c\u018f\u0001\u0000\u0000\u0000\u018d\u018e\u0005"+ + "!\u0000\u0000\u018e\u0190\u0003\"\u0011\u0000\u018f\u018d\u0001\u0000"+ + "\u0000\u0000\u018f\u0190\u0001\u0000\u0000\u0000\u01905\u0001\u0000\u0000"+ + "\u0000\u0191\u0196\u00038\u001c\u0000\u0192\u0193\u0005\'\u0000\u0000"+ + "\u0193\u0195\u00038\u001c\u0000\u0194\u0192\u0001\u0000\u0000\u0000\u0195"+ + "\u0198\u0001\u0000\u0000\u0000\u0196\u0194\u0001\u0000\u0000\u0000\u0196"+ + "\u0197\u0001\u0000\u0000\u0000\u01977\u0001\u0000\u0000\u0000\u0198\u0196"+ + "\u0001\u0000\u0000\u0000\u0199\u019c\u0003$\u0012\u0000\u019a\u019b\u0005"+ + "\u0010\u0000\u0000\u019b\u019d\u0003\n\u0005\u0000\u019c\u019a\u0001\u0000"+ + "\u0000\u0000\u019c\u019d\u0001\u0000\u0000\u0000\u019d9\u0001\u0000\u0000"+ + "\u0000\u019e\u01a3\u0003H$\u0000\u019f\u01a0\u0005)\u0000\u0000\u01a0"+ + "\u01a2\u0003H$\u0000\u01a1\u019f\u0001\u0000\u0000\u0000\u01a2\u01a5\u0001"+ + "\u0000\u0000\u0000\u01a3\u01a1\u0001\u0000\u0000\u0000\u01a3\u01a4\u0001"+ + "\u0000\u0000\u0000\u01a4;\u0001\u0000\u0000\u0000\u01a5\u01a3\u0001\u0000"+ + "\u0000\u0000\u01a6\u01ab\u0003B!\u0000\u01a7\u01a8\u0005)\u0000\u0000"+ + "\u01a8\u01aa\u0003B!\u0000\u01a9\u01a7\u0001\u0000\u0000\u0000\u01aa\u01ad"+ + "\u0001\u0000\u0000\u0000\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ab\u01ac"+ + "\u0001\u0000\u0000\u0000\u01ac=\u0001\u0000\u0000\u0000\u01ad\u01ab\u0001"+ + "\u0000\u0000\u0000\u01ae\u01b3\u0003<\u001e\u0000\u01af\u01b0\u0005\'"+ + "\u0000\u0000\u01b0\u01b2\u0003<\u001e\u0000\u01b1\u01af\u0001\u0000\u0000"+ + "\u0000\u01b2\u01b5\u0001\u0000\u0000\u0000\u01b3\u01b1\u0001\u0000\u0000"+ + 
"\u0000\u01b3\u01b4\u0001\u0000\u0000\u0000\u01b4?\u0001\u0000\u0000\u0000"+ + "\u01b5\u01b3\u0001\u0000\u0000\u0000\u01b6\u01b7\u0007\u0003\u0000\u0000"+ + "\u01b7A\u0001\u0000\u0000\u0000\u01b8\u01bc\u0005W\u0000\u0000\u01b9\u01ba"+ + "\u0004!\u000b\u0000\u01ba\u01bc\u0003F#\u0000\u01bb\u01b8\u0001\u0000"+ + "\u0000\u0000\u01bb\u01b9\u0001\u0000\u0000\u0000\u01bcC\u0001\u0000\u0000"+ + "\u0000\u01bd\u01e8\u00052\u0000\u0000\u01be\u01bf\u0003h4\u0000\u01bf"+ + "\u01c0\u0005J\u0000\u0000\u01c0\u01e8\u0001\u0000\u0000\u0000\u01c1\u01e8"+ + "\u0003f3\u0000\u01c2\u01e8\u0003h4\u0000\u01c3\u01e8\u0003b1\u0000\u01c4"+ + "\u01e8\u0003F#\u0000\u01c5\u01e8\u0003j5\u0000\u01c6\u01c7\u0005H\u0000"+ + "\u0000\u01c7\u01cc\u0003d2\u0000\u01c8\u01c9\u0005\'\u0000\u0000\u01c9"+ + "\u01cb\u0003d2\u0000\u01ca\u01c8\u0001\u0000\u0000\u0000\u01cb\u01ce\u0001"+ + "\u0000\u0000\u0000\u01cc\u01ca\u0001\u0000\u0000\u0000\u01cc\u01cd\u0001"+ + "\u0000\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001"+ + "\u0000\u0000\u0000\u01cf\u01d0\u0005I\u0000\u0000\u01d0\u01e8\u0001\u0000"+ + "\u0000\u0000\u01d1\u01d2\u0005H\u0000\u0000\u01d2\u01d7\u0003b1\u0000"+ + "\u01d3\u01d4\u0005\'\u0000\u0000\u01d4\u01d6\u0003b1\u0000\u01d5\u01d3"+ + "\u0001\u0000\u0000\u0000\u01d6\u01d9\u0001\u0000\u0000\u0000\u01d7\u01d5"+ + "\u0001\u0000\u0000\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01da"+ + "\u0001\u0000\u0000\u0000\u01d9\u01d7\u0001\u0000\u0000\u0000\u01da\u01db"+ + "\u0005I\u0000\u0000\u01db\u01e8\u0001\u0000\u0000\u0000\u01dc\u01dd\u0005"+ + "H\u0000\u0000\u01dd\u01e2\u0003j5\u0000\u01de\u01df\u0005\'\u0000\u0000"+ + "\u01df\u01e1\u0003j5\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1\u01e4"+ + "\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000\u0000\u0000\u01e2\u01e3"+ + "\u0001\u0000\u0000\u0000\u01e3\u01e5\u0001\u0000\u0000\u0000\u01e4\u01e2"+ + "\u0001\u0000\u0000\u0000\u01e5\u01e6\u0005I\u0000\u0000\u01e6\u01e8\u0001"+ + 
"\u0000\u0000\u0000\u01e7\u01bd\u0001\u0000\u0000\u0000\u01e7\u01be\u0001"+ + "\u0000\u0000\u0000\u01e7\u01c1\u0001\u0000\u0000\u0000\u01e7\u01c2\u0001"+ + "\u0000\u0000\u0000\u01e7\u01c3\u0001\u0000\u0000\u0000\u01e7\u01c4\u0001"+ + "\u0000\u0000\u0000\u01e7\u01c5\u0001\u0000\u0000\u0000\u01e7\u01c6\u0001"+ + "\u0000\u0000\u0000\u01e7\u01d1\u0001\u0000\u0000\u0000\u01e7\u01dc\u0001"+ + "\u0000\u0000\u0000\u01e8E\u0001\u0000\u0000\u0000\u01e9\u01ec\u00055\u0000"+ + "\u0000\u01ea\u01ec\u0005G\u0000\u0000\u01eb\u01e9\u0001\u0000\u0000\u0000"+ + "\u01eb\u01ea\u0001\u0000\u0000\u0000\u01ecG\u0001\u0000\u0000\u0000\u01ed"+ + "\u01f1\u0003@ \u0000\u01ee\u01ef\u0004$\f\u0000\u01ef\u01f1\u0003F#\u0000"+ + "\u01f0\u01ed\u0001\u0000\u0000\u0000\u01f0\u01ee\u0001\u0000\u0000\u0000"+ + "\u01f1I\u0001\u0000\u0000\u0000\u01f2\u01f3\u0005\t\u0000\u0000\u01f3"+ + "\u01f4\u0005\u001f\u0000\u0000\u01f4K\u0001\u0000\u0000\u0000\u01f5\u01f6"+ + "\u0005\u000e\u0000\u0000\u01f6\u01fb\u0003N\'\u0000\u01f7\u01f8\u0005"+ + "\'\u0000\u0000\u01f8\u01fa\u0003N\'\u0000\u01f9\u01f7\u0001\u0000\u0000"+ + "\u0000\u01fa\u01fd\u0001\u0000\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000"+ + "\u0000\u01fb\u01fc\u0001\u0000\u0000\u0000\u01fcM\u0001\u0000\u0000\u0000"+ + "\u01fd\u01fb\u0001\u0000\u0000\u0000\u01fe\u0200\u0003\n\u0005\u0000\u01ff"+ + "\u0201\u0007\u0004\u0000\u0000\u0200\u01ff\u0001\u0000\u0000\u0000\u0200"+ + "\u0201\u0001\u0000\u0000\u0000\u0201\u0204\u0001\u0000\u0000\u0000\u0202"+ + "\u0203\u00053\u0000\u0000\u0203\u0205\u0007\u0005\u0000\u0000\u0204\u0202"+ + "\u0001\u0000\u0000\u0000\u0204\u0205\u0001\u0000\u0000\u0000\u0205O\u0001"+ + "\u0000\u0000\u0000\u0206\u0207\u0005\b\u0000\u0000\u0207\u0208\u0003>"+ + "\u001f\u0000\u0208Q\u0001\u0000\u0000\u0000\u0209\u020a\u0005\u0002\u0000"+ + "\u0000\u020a\u020b\u0003>\u001f\u0000\u020bS\u0001\u0000\u0000\u0000\u020c"+ + "\u020d\u0005\u000b\u0000\u0000\u020d\u0212\u0003V+\u0000\u020e\u020f\u0005"+ + 
"\'\u0000\u0000\u020f\u0211\u0003V+\u0000\u0210\u020e\u0001\u0000\u0000"+ + "\u0000\u0211\u0214\u0001\u0000\u0000\u0000\u0212\u0210\u0001\u0000\u0000"+ + "\u0000\u0212\u0213\u0001\u0000\u0000\u0000\u0213U\u0001\u0000\u0000\u0000"+ + "\u0214\u0212\u0001\u0000\u0000\u0000\u0215\u0216\u0003<\u001e\u0000\u0216"+ + "\u0217\u0005[\u0000\u0000\u0217\u0218\u0003<\u001e\u0000\u0218W\u0001"+ + "\u0000\u0000\u0000\u0219\u021a\u0005\u0001\u0000\u0000\u021a\u021b\u0003"+ + "\u0014\n\u0000\u021b\u021d\u0003j5\u0000\u021c\u021e\u0003^/\u0000\u021d"+ + "\u021c\u0001\u0000\u0000\u0000\u021d\u021e\u0001\u0000\u0000\u0000\u021e"+ + "Y\u0001\u0000\u0000\u0000\u021f\u0220\u0005\u0007\u0000\u0000\u0220\u0221"+ + "\u0003\u0014\n\u0000\u0221\u0222\u0003j5\u0000\u0222[\u0001\u0000\u0000"+ + "\u0000\u0223\u0224\u0005\n\u0000\u0000\u0224\u0225\u0003:\u001d\u0000"+ + "\u0225]\u0001\u0000\u0000\u0000\u0226\u022b\u0003`0\u0000\u0227\u0228"+ + "\u0005\'\u0000\u0000\u0228\u022a\u0003`0\u0000\u0229\u0227\u0001\u0000"+ + "\u0000\u0000\u022a\u022d\u0001\u0000\u0000\u0000\u022b\u0229\u0001\u0000"+ + "\u0000\u0000\u022b\u022c\u0001\u0000\u0000\u0000\u022c_\u0001\u0000\u0000"+ + "\u0000\u022d\u022b\u0001\u0000\u0000\u0000\u022e\u022f\u0003@ \u0000\u022f"+ + "\u0230\u0005$\u0000\u0000\u0230\u0231\u0003D\"\u0000\u0231a\u0001\u0000"+ + "\u0000\u0000\u0232\u0233\u0007\u0006\u0000\u0000\u0233c\u0001\u0000\u0000"+ + "\u0000\u0234\u0237\u0003f3\u0000\u0235\u0237\u0003h4\u0000\u0236\u0234"+ + "\u0001\u0000\u0000\u0000\u0236\u0235\u0001\u0000\u0000\u0000\u0237e\u0001"+ + "\u0000\u0000\u0000\u0238\u023a\u0007\u0000\u0000\u0000\u0239\u0238\u0001"+ + "\u0000\u0000\u0000\u0239\u023a\u0001\u0000\u0000\u0000\u023a\u023b\u0001"+ + "\u0000\u0000\u0000\u023b\u023c\u0005 \u0000\u0000\u023cg\u0001\u0000\u0000"+ + "\u0000\u023d\u023f\u0007\u0000\u0000\u0000\u023e\u023d\u0001\u0000\u0000"+ + "\u0000\u023e\u023f\u0001\u0000\u0000\u0000\u023f\u0240\u0001\u0000\u0000"+ + 
"\u0000\u0240\u0241\u0005\u001f\u0000\u0000\u0241i\u0001\u0000\u0000\u0000"+ + "\u0242\u0243\u0005\u001e\u0000\u0000\u0243k\u0001\u0000\u0000\u0000\u0244"+ + "\u0245\u0007\u0007\u0000\u0000\u0245m\u0001\u0000\u0000\u0000\u0246\u0247"+ + "\u0005\u0005\u0000\u0000\u0247\u0248\u0003p8\u0000\u0248o\u0001\u0000"+ + "\u0000\u0000\u0249\u024a\u0005H\u0000\u0000\u024a\u024b\u0003\u0002\u0001"+ + "\u0000\u024b\u024c\u0005I\u0000\u0000\u024cq\u0001\u0000\u0000\u0000\u024d"+ + "\u024e\u0005\r\u0000\u0000\u024e\u024f\u0005k\u0000\u0000\u024fs\u0001"+ + "\u0000\u0000\u0000\u0250\u0251\u0005\u0003\u0000\u0000\u0251\u0254\u0005"+ + "a\u0000\u0000\u0252\u0253\u0005_\u0000\u0000\u0253\u0255\u0003<\u001e"+ + "\u0000\u0254\u0252\u0001\u0000\u0000\u0000\u0254\u0255\u0001\u0000\u0000"+ + "\u0000\u0255\u025f\u0001\u0000\u0000\u0000\u0256\u0257\u0005`\u0000\u0000"+ + "\u0257\u025c\u0003v;\u0000\u0258\u0259\u0005\'\u0000\u0000\u0259\u025b"+ + "\u0003v;\u0000\u025a\u0258\u0001\u0000\u0000\u0000\u025b\u025e\u0001\u0000"+ + "\u0000\u0000\u025c\u025a\u0001\u0000\u0000\u0000\u025c\u025d\u0001\u0000"+ + "\u0000\u0000\u025d\u0260\u0001\u0000\u0000\u0000\u025e\u025c\u0001\u0000"+ + "\u0000\u0000\u025f\u0256\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000"+ + "\u0000\u0000\u0260u\u0001\u0000\u0000\u0000\u0261\u0262\u0003<\u001e\u0000"+ + "\u0262\u0263\u0005$\u0000\u0000\u0263\u0265\u0001\u0000\u0000\u0000\u0264"+ + "\u0261\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265"+ + "\u0266\u0001\u0000\u0000\u0000\u0266\u0267\u0003<\u001e\u0000\u0267w\u0001"+ + "\u0000\u0000\u0000\u0268\u0269\u0005\u0012\u0000\u0000\u0269\u026a\u0003"+ + "(\u0014\u0000\u026a\u026b\u0005_\u0000\u0000\u026b\u026c\u0003>\u001f"+ + "\u0000\u026cy\u0001\u0000\u0000\u0000\u026d\u026e\u0005\u0011\u0000\u0000"+ + "\u026e\u0271\u00036\u001b\u0000\u026f\u0270\u0005!\u0000\u0000\u0270\u0272"+ + "\u0003\"\u0011\u0000\u0271\u026f\u0001\u0000\u0000\u0000\u0271\u0272\u0001"+ + 
"\u0000\u0000\u0000\u0272{\u0001\u0000\u0000\u0000\u0273\u0275\u0007\b"+ + "\u0000\u0000\u0274\u0273\u0001\u0000\u0000\u0000\u0274\u0275\u0001\u0000"+ + "\u0000\u0000\u0275\u0276\u0001\u0000\u0000\u0000\u0276\u0277\u0005\u0014"+ + "\u0000\u0000\u0277\u0278\u0003~?\u0000\u0278\u0279\u0003\u0080@\u0000"+ + "\u0279}\u0001\u0000\u0000\u0000\u027a\u027d\u0003@ \u0000\u027b\u027c"+ + "\u0005[\u0000\u0000\u027c\u027e\u0003@ \u0000\u027d\u027b\u0001\u0000"+ + "\u0000\u0000\u027d\u027e\u0001\u0000\u0000\u0000\u027e\u007f\u0001\u0000"+ + "\u0000\u0000\u027f\u0280\u0005_\u0000\u0000\u0280\u0285\u0003\u0082A\u0000"+ + "\u0281\u0282\u0005\'\u0000\u0000\u0282\u0284\u0003\u0082A\u0000\u0283"+ + "\u0281\u0001\u0000\u0000\u0000\u0284\u0287\u0001\u0000\u0000\u0000\u0285"+ + "\u0283\u0001\u0000\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286"+ + "\u0081\u0001\u0000\u0000\u0000\u0287\u0285\u0001\u0000\u0000\u0000\u0288"+ + "\u0289\u0003\u0010\b\u0000\u0289\u0083\u0001\u0000\u0000\u0000?\u008f"+ + "\u0098\u00ac\u00b8\u00c1\u00c9\u00ce\u00d6\u00d8\u00dd\u00e4\u00e9\u00ee"+ + "\u00f8\u00fe\u0106\u0108\u0113\u011a\u0125\u012a\u012c\u0139\u014c\u0152"+ + "\u015c\u0160\u0165\u0173\u017c\u0180\u0184\u018b\u018f\u0196\u019c\u01a3"+ + "\u01ab\u01b3\u01bb\u01cc\u01d7\u01e2\u01e7\u01eb\u01f0\u01fb\u0200\u0204"+ + "\u0212\u021d\u022b\u0236\u0239\u023e\u0254\u025c\u025f\u0264\u0271\u0274"+ + "\u027d\u0285"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 2ee0efe52dfbe..df6d8f1e35013 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -344,6 +344,30 
@@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

    The default implementation does nothing.

    */ @Override public void exitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterMapExpression(EsqlBaseParser.MapExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitMapExpression(EsqlBaseParser.MapExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void enterEntryExpression(EsqlBaseParser.EntryExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

    The default implementation does nothing.

    + */ + @Override public void exitEntryExpression(EsqlBaseParser.EntryExpressionContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index c5c1b0dfa7d7d..885ba91e20dcd 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -209,6 +209,20 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.

    */ @Override public T visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitMapExpression(EsqlBaseParser.MapExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

    The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.

    + */ + @Override public T visitEntryExpression(EsqlBaseParser.EntryExpressionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index f45184e920658..2c1faa374695e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -323,6 +323,26 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitFunctionName(EsqlBaseParser.FunctionNameContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#mapExpression}. + * @param ctx the parse tree + */ + void enterMapExpression(EsqlBaseParser.MapExpressionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#mapExpression}. + * @param ctx the parse tree + */ + void exitMapExpression(EsqlBaseParser.MapExpressionContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#entryExpression}. + * @param ctx the parse tree + */ + void enterEntryExpression(EsqlBaseParser.EntryExpressionContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#entryExpression}. + * @param ctx the parse tree + */ + void exitEntryExpression(EsqlBaseParser.EntryExpressionContext ctx); /** * Enter a parse tree produced by the {@code toDataType} * labeled alternative in {@link EsqlBaseParser#dataType}. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 30c5e0ce78092..73afd23393cdb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -199,6 +199,18 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitFunctionName(EsqlBaseParser.FunctionNameContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#mapExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitMapExpression(EsqlBaseParser.MapExpressionContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#entryExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitEntryExpression(EsqlBaseParser.EntryExpressionContext ctx); /** * Visit a parse tree produced by the {@code toDataType} * labeled alternative in {@link EsqlBaseParser#dataType}. 
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index eb81446f9ddea..283e305d79270 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -23,15 +23,11 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; import org.elasticsearch.xpack.esql.core.expression.function.Function; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; @@ -48,6 +44,11 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import 
org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -76,6 +77,8 @@ import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.elasticsearch.xpack.esql.core.type.DataType.DATE_PERIOD; +import static org.elasticsearch.xpack.esql.core.type.DataType.KEYWORD; +import static org.elasticsearch.xpack.esql.core.type.DataType.NULL; import static org.elasticsearch.xpack.esql.core.type.DataType.TIME_DURATION; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; @@ -597,6 +600,10 @@ public UnresolvedAttribute visitDereference(EsqlBaseParser.DereferenceContext ct public Expression visitFunctionExpression(EsqlBaseParser.FunctionExpressionContext ctx) { String name = visitFunctionName(ctx.functionName()); List args = expressions(ctx.booleanExpression()); + if (ctx.mapExpression() != null) { + MapExpression mapArg = visitMapExpression(ctx.mapExpression()); + args.add(mapArg); + } if ("is_null".equals(EsqlFunctionRegistry.normalizeName(name))) { throw new ParsingException( source(ctx), @@ -617,6 +624,44 @@ public String visitFunctionName(EsqlBaseParser.FunctionNameContext ctx) { return visitIdentifierOrParameter(ctx.identifierOrParameter()); } + @Override + public MapExpression visitMapExpression(EsqlBaseParser.MapExpressionContext ctx) { + List namedArgs = new ArrayList<>(ctx.entryExpression().size()); + List names = new 
ArrayList<>(ctx.entryExpression().size()); + List kvCtx = ctx.entryExpression(); + for (EsqlBaseParser.EntryExpressionContext entry : kvCtx) { + EsqlBaseParser.StringContext stringCtx = entry.string(); + String key = unquote(stringCtx.QUOTED_STRING().getText()); // key is case-sensitive + if (key.isBlank()) { + throw new ParsingException( + source(ctx), + "Invalid named function argument [{}], empty key is not supported", + entry.getText() + ); + } + if (names.contains(key)) { + throw new ParsingException(source(ctx), "Duplicated function arguments with the same name [{}] is not supported", key); + } + Expression value = expression(entry.constant()); + String entryText = entry.getText(); + if (value instanceof Literal l) { + if (l.dataType() == NULL) { + throw new ParsingException(source(ctx), "Invalid named function argument [{}], NULL is not supported", entryText); + } + namedArgs.add(new Literal(source(stringCtx), key, KEYWORD)); + namedArgs.add(l); + names.add(key); + } else { + throw new ParsingException( + source(ctx), + "Invalid named function argument [{}], only constant value is supported", + entryText + ); + } + } + return new MapExpression(Source.EMPTY, namedArgs); + } + @Override public String visitIdentifierOrParameter(EsqlBaseParser.IdentifierOrParameterContext ctx) { if (ctx.identifier() != null) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index 4b7c0118acda3..cc1ac6ac31385 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -16,7 +16,6 @@ import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.VerificationException; -import 
org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.common.Failure; import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; @@ -262,8 +261,7 @@ public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { for (var c : ctx.metadata().UNQUOTED_SOURCE()) { String id = c.getText(); Source src = source(c); - if (MetadataAttribute.isSupported(id) == false // TODO: drop check below once METADATA_SCORE is no longer snapshot-only - || (EsqlCapabilities.Cap.METADATA_SCORE.isEnabled() == false && MetadataAttribute.SCORE.equals(id))) { + if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); } Attribute a = metadataMap.put(id, MetadataAttribute.create(src, id)); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java index df0b258679d4c..90b3aa8625087 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/EsRelation.java @@ -27,6 +27,9 @@ import java.util.Map; import java.util.Map.Entry; import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.TransportVersions.ESQL_SKIP_ES_INDEX_SERIALIZATION; public class EsRelation extends LeafPlan { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( @@ -35,30 +38,41 @@ public class EsRelation extends LeafPlan { EsRelation::readFrom ); - private final EsIndex index; - private final List attrs; - private final boolean frozen; + private final String indexPattern; private final IndexMode indexMode; + private final Map indexNameWithModes; + private final List attrs; - public EsRelation(Source source, EsIndex index, IndexMode 
indexMode, boolean frozen) { - this(source, index, flatten(source, index.mapping()), indexMode, frozen); - } - - public EsRelation(Source source, EsIndex index, List attributes, IndexMode indexMode) { - this(source, index, attributes, indexMode, false); + public EsRelation(Source source, EsIndex index, IndexMode indexMode) { + this(source, index.name(), indexMode, index.indexNameWithModes(), flatten(source, index.mapping())); } - public EsRelation(Source source, EsIndex index, List attributes, IndexMode indexMode, boolean frozen) { + public EsRelation( + Source source, + String indexPattern, + IndexMode indexMode, + Map indexNameWithModes, + List attributes + ) { super(source); - this.index = index; - this.attrs = attributes; + this.indexPattern = indexPattern; this.indexMode = indexMode; - this.frozen = frozen; + this.indexNameWithModes = indexNameWithModes; + this.attrs = attributes; } private static EsRelation readFrom(StreamInput in) throws IOException { Source source = Source.readFrom((PlanStreamInput) in); - EsIndex esIndex = EsIndex.readFrom(in); + String indexPattern; + Map indexNameWithModes; + if (in.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + indexPattern = in.readString(); + indexNameWithModes = in.readMap(IndexMode::readFrom); + } else { + var index = EsIndex.readFrom(in); + indexPattern = index.name(); + indexNameWithModes = index.indexNameWithModes(); + } List attributes = in.readNamedWriteableCollectionAsList(Attribute.class); if (supportingEsSourceOptions(in.getTransportVersion())) { // We don't do anything with these strings @@ -67,23 +81,32 @@ private static EsRelation readFrom(StreamInput in) throws IOException { in.readOptionalString(); } IndexMode indexMode = readIndexMode(in); - boolean frozen = in.readBoolean(); - return new EsRelation(source, esIndex, attributes, indexMode, frozen); + if (in.getTransportVersion().before(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + in.readBoolean(); + } + return new EsRelation(source, 
indexPattern, indexMode, indexNameWithModes, attributes); } @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - index().writeTo(out); - out.writeNamedWriteableCollection(output()); + if (out.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + out.writeString(indexPattern); + out.writeMap(indexNameWithModes, (o, v) -> IndexMode.writeTo(v, out)); + } else { + new EsIndex(indexPattern, Map.of(), indexNameWithModes).writeTo(out); + } + out.writeNamedWriteableCollection(attrs); if (supportingEsSourceOptions(out.getTransportVersion())) { // write (null) string fillers expected by remote out.writeOptionalString(null); out.writeOptionalString(null); out.writeOptionalString(null); } - writeIndexMode(out, indexMode()); - out.writeBoolean(frozen()); + writeIndexMode(out, indexMode); + if (out.getTransportVersion().before(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + out.writeBoolean(false); + } } private static boolean supportingEsSourceOptions(TransportVersion version) { @@ -97,7 +120,7 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, EsRelation::new, index, attrs, indexMode, frozen); + return NodeInfo.create(this, EsRelation::new, indexPattern, indexMode, indexNameWithModes, attrs); } private static List flatten(Source source, Map mapping) { @@ -128,23 +151,27 @@ private static List flatten(Source source, Map mappi return list; } - public EsIndex index() { - return index; - } - - public boolean frozen() { - return frozen; + public String indexPattern() { + return indexPattern; } public IndexMode indexMode() { return indexMode; } + public Map indexNameWithModes() { + return indexNameWithModes; + } + @Override public List output() { return attrs; } + public Set concreteIndices() { + return indexNameWithModes.keySet(); + } + @Override public String commandName() { return "FROM"; @@ -159,7 +186,7 @@ public boolean expressionsResolved() { @Override public int 
hashCode() { - return Objects.hash(index, indexMode, frozen, attrs); + return Objects.hash(indexPattern, indexMode, indexNameWithModes, attrs); } @Override @@ -173,9 +200,9 @@ public boolean equals(Object obj) { } EsRelation other = (EsRelation) obj; - return Objects.equals(index, other.index) - && indexMode == other.indexMode() - && frozen == other.frozen + return Objects.equals(indexPattern, other.indexPattern) + && Objects.equals(indexMode, other.indexMode) + && Objects.equals(indexNameWithModes, other.indexNameWithModes) && Objects.equals(attrs, other.attrs); } @@ -183,7 +210,7 @@ public boolean equals(Object obj) { public String nodeString() { return nodeName() + "[" - + index + + indexPattern + "]" + (indexMode != IndexMode.STANDARD ? "[" + indexMode.name() + "]" : "") + NodeUtils.limitedToString(attrs); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java index ab533899aaff6..a3fc62d935795 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExec.java @@ -36,22 +36,25 @@ import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.ESQL_SKIP_ES_INDEX_SERIALIZATION; + public class EsQueryExec extends LeafExec implements EstimatesRowSize { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( PhysicalPlan.class, "EsQueryExec", - EsQueryExec::deserialize + EsQueryExec::readFrom ); public static final EsField DOC_ID_FIELD = new EsField("_doc", DataType.DOC_DATA_TYPE, Map.of(), false); public static final List NO_SORTS = List.of(); // only exists to mimic older serialization, but we no longer serialize sorts - private final EsIndex index; + private final String indexPattern; private final IndexMode 
indexMode; + private final Map indexNameWithModes; + private final List attrs; private final QueryBuilder query; private final Expression limit; private final List sorts; - private final List attrs; /** * Estimate of the number of bytes that'll be loaded per position before @@ -108,14 +111,22 @@ public FieldAttribute field() { } } - public EsQueryExec(Source source, EsIndex index, IndexMode indexMode, List attributes, QueryBuilder query) { - this(source, index, indexMode, attributes, query, null, null, null); + public EsQueryExec( + Source source, + String indexPattern, + IndexMode indexMode, + Map indexNameWithModes, + List attributes, + QueryBuilder query + ) { + this(source, indexPattern, indexMode, indexNameWithModes, attributes, query, null, null, null); } public EsQueryExec( Source source, - EsIndex index, + String indexPattern, IndexMode indexMode, + Map indexNameWithModes, List attrs, QueryBuilder query, Expression limit, @@ -123,10 +134,11 @@ public EsQueryExec( Integer estimatedRowSize ) { super(source); - this.index = index; + this.indexPattern = indexPattern; this.indexMode = indexMode; - this.query = query; + this.indexNameWithModes = indexNameWithModes; this.attrs = attrs; + this.query = query; this.limit = limit; this.sorts = sorts; this.estimatedRowSize = estimatedRowSize; @@ -136,9 +148,18 @@ public EsQueryExec( * The matching constructor is used during physical plan optimization and needs valid sorts. But we no longer serialize sorts. * If this cluster node is talking to an older instance it might receive a plan with sorts, but it will ignore them. 
*/ - public static EsQueryExec deserialize(StreamInput in) throws IOException { + private static EsQueryExec readFrom(StreamInput in) throws IOException { var source = Source.readFrom((PlanStreamInput) in); - var index = EsIndex.readFrom(in); + String indexPattern; + Map indexNameWithModes; + if (in.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + indexPattern = in.readString(); + indexNameWithModes = in.readMap(IndexMode::readFrom); + } else { + var index = EsIndex.readFrom(in); + indexPattern = index.name(); + indexNameWithModes = index.indexNameWithModes(); + } var indexMode = EsRelation.readIndexMode(in); var attrs = in.readNamedWriteableCollectionAsList(Attribute.class); var query = in.readOptionalNamedWriteable(QueryBuilder.class); @@ -146,7 +167,7 @@ public static EsQueryExec deserialize(StreamInput in) throws IOException { in.readOptionalCollectionAsList(EsQueryExec::readSort); var rowSize = in.readOptionalVInt(); // Ignore sorts from the old serialization format - return new EsQueryExec(source, index, indexMode, attrs, query, limit, NO_SORTS, rowSize); + return new EsQueryExec(source, indexPattern, indexMode, indexNameWithModes, attrs, query, limit, NO_SORTS, rowSize); } private static Sort readSort(StreamInput in) throws IOException { @@ -160,7 +181,12 @@ private static void writeSort(StreamOutput out, Sort sort) { @Override public void writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - index().writeTo(out); + if (out.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + out.writeString(indexPattern); + out.writeMap(indexNameWithModes, (o, v) -> IndexMode.writeTo(v, out)); + } else { + new EsIndex(indexPattern, Map.of(), indexNameWithModes).writeTo(out); + } EsRelation.writeIndexMode(out, indexMode()); out.writeNamedWriteableCollection(output()); out.writeOptionalNamedWriteable(query()); @@ -180,17 +206,32 @@ public static boolean isSourceAttribute(Attribute attr) { @Override protected 
NodeInfo info() { - return NodeInfo.create(this, EsQueryExec::new, index, indexMode, attrs, query, limit, sorts, estimatedRowSize); + return NodeInfo.create( + this, + EsQueryExec::new, + indexPattern, + indexMode, + indexNameWithModes, + attrs, + query, + limit, + sorts, + estimatedRowSize + ); } - public EsIndex index() { - return index; + public String indexPattern() { + return indexPattern; } public IndexMode indexMode() { return indexMode; } + public Map indexNameWithModes() { + return indexNameWithModes; + } + public QueryBuilder query() { return query; } @@ -234,13 +275,13 @@ public PhysicalPlan estimateRowSize(State state) { } return Objects.equals(this.estimatedRowSize, size) ? this - : new EsQueryExec(source(), index, indexMode, attrs, query, limit, sorts, size); + : new EsQueryExec(source(), indexPattern, indexMode, indexNameWithModes, attrs, query, limit, sorts, size); } public EsQueryExec withLimit(Expression limit) { return Objects.equals(this.limit, limit) ? this - : new EsQueryExec(source(), index, indexMode, attrs, query, limit, sorts, estimatedRowSize); + : new EsQueryExec(source(), indexPattern, indexMode, indexNameWithModes, attrs, query, limit, sorts, estimatedRowSize); } public boolean canPushSorts() { @@ -254,12 +295,12 @@ public EsQueryExec withSorts(List sorts) { } return Objects.equals(this.sorts, sorts) ? 
this - : new EsQueryExec(source(), index, indexMode, attrs, query, limit, sorts, estimatedRowSize); + : new EsQueryExec(source(), indexPattern, indexMode, indexNameWithModes, attrs, query, limit, sorts, estimatedRowSize); } @Override public int hashCode() { - return Objects.hash(index, indexMode, attrs, query, limit, sorts); + return Objects.hash(indexPattern, indexMode, indexNameWithModes, attrs, query, limit, sorts); } @Override @@ -273,8 +314,9 @@ public boolean equals(Object obj) { } EsQueryExec other = (EsQueryExec) obj; - return Objects.equals(index, other.index) + return Objects.equals(indexPattern, other.indexPattern) && Objects.equals(indexMode, other.indexMode) + && Objects.equals(indexNameWithModes, other.indexNameWithModes) && Objects.equals(attrs, other.attrs) && Objects.equals(query, other.query) && Objects.equals(limit, other.limit) @@ -286,7 +328,7 @@ public boolean equals(Object obj) { public String nodeString() { return nodeName() + "[" - + index + + indexPattern + "], " + "indexMode[" + indexMode diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExec.java index eeeafc52f158b..5da3ef9f72dd3 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExec.java @@ -22,46 +22,71 @@ import java.io.IOException; import java.util.List; +import java.util.Map; import java.util.Objects; +import static org.elasticsearch.TransportVersions.ESQL_SKIP_ES_INDEX_SERIALIZATION; + public class EsSourceExec extends LeafExec { public static final NamedWriteableRegistry.Entry ENTRY = new NamedWriteableRegistry.Entry( PhysicalPlan.class, "EsSourceExec", - EsSourceExec::new + EsSourceExec::readFrom ); - private final EsIndex index; + private final String indexPattern; + private final IndexMode 
indexMode; + private final Map indexNameWithModes; private final List attributes; private final QueryBuilder query; - private final IndexMode indexMode; public EsSourceExec(EsRelation relation) { - this(relation.source(), relation.index(), relation.output(), null, relation.indexMode()); + this(relation.source(), relation.indexPattern(), relation.indexMode(), relation.indexNameWithModes(), relation.output(), null); } - public EsSourceExec(Source source, EsIndex index, List attributes, QueryBuilder query, IndexMode indexMode) { + public EsSourceExec( + Source source, + String indexPattern, + IndexMode indexMode, + Map indexNameWithModes, + List attributes, + QueryBuilder query + ) { super(source); - this.index = index; + this.indexPattern = indexPattern; + this.indexMode = indexMode; + this.indexNameWithModes = indexNameWithModes; this.attributes = attributes; this.query = query; - this.indexMode = indexMode; } - private EsSourceExec(StreamInput in) throws IOException { - this( - Source.readFrom((PlanStreamInput) in), - EsIndex.readFrom(in), - in.readNamedWriteableCollectionAsList(Attribute.class), - in.readOptionalNamedWriteable(QueryBuilder.class), - EsRelation.readIndexMode(in) - ); + private static EsSourceExec readFrom(StreamInput in) throws IOException { + var source = Source.readFrom((PlanStreamInput) in); + String indexPattern; + Map indexNameWithModes; + if (in.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + indexPattern = in.readString(); + indexNameWithModes = in.readMap(IndexMode::readFrom); + } else { + var index = EsIndex.readFrom(in); + indexPattern = index.name(); + indexNameWithModes = index.indexNameWithModes(); + } + var attributes = in.readNamedWriteableCollectionAsList(Attribute.class); + var query = in.readOptionalNamedWriteable(QueryBuilder.class); + var indexMode = EsRelation.readIndexMode(in); + return new EsSourceExec(source, indexPattern, indexMode, indexNameWithModes, attributes, query); } @Override public void 
writeTo(StreamOutput out) throws IOException { Source.EMPTY.writeTo(out); - index().writeTo(out); + if (out.getTransportVersion().onOrAfter(ESQL_SKIP_ES_INDEX_SERIALIZATION)) { + out.writeString(indexPattern); + out.writeMap(indexNameWithModes, (o, v) -> IndexMode.writeTo(v, out)); + } else { + new EsIndex(indexPattern, Map.of(), indexNameWithModes).writeTo(out); + } out.writeNamedWriteableCollection(output()); out.writeOptionalNamedWriteable(query()); EsRelation.writeIndexMode(out, indexMode()); @@ -72,18 +97,22 @@ public String getWriteableName() { return ENTRY.name; } - public EsIndex index() { - return index; - } - - public QueryBuilder query() { - return query; + public String indexPattern() { + return indexPattern; } public IndexMode indexMode() { return indexMode; } + public Map indexNameWithModes() { + return indexNameWithModes; + } + + public QueryBuilder query() { + return query; + } + @Override public List output() { return attributes; @@ -91,12 +120,12 @@ public List output() { @Override protected NodeInfo info() { - return NodeInfo.create(this, EsSourceExec::new, index, attributes, query, indexMode); + return NodeInfo.create(this, EsSourceExec::new, indexPattern, indexMode, indexNameWithModes, attributes, query); } @Override public int hashCode() { - return Objects.hash(index, attributes, query, indexMode); + return Objects.hash(indexPattern, indexMode, indexNameWithModes, attributes, query); } @Override @@ -110,14 +139,15 @@ public boolean equals(Object obj) { } EsSourceExec other = (EsSourceExec) obj; - return Objects.equals(index, other.index) + return Objects.equals(indexPattern, other.indexPattern) + && Objects.equals(indexMode, other.indexMode) + && Objects.equals(indexNameWithModes, other.indexNameWithModes) && Objects.equals(attributes, other.attributes) - && Objects.equals(query, other.query) - && Objects.equals(indexMode, other.indexMode); + && Objects.equals(query, other.query); } @Override public String nodeString() { - return nodeName() + 
"[" + index + "]" + NodeUtils.limitedToString(attributes); + return nodeName() + "[" + indexPattern + "]" + NodeUtils.limitedToString(attributes); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java index 5a98ecc7d6594..96214652b87cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/physical/EsStatsQueryExec.java @@ -16,7 +16,6 @@ import org.elasticsearch.xpack.esql.core.tree.NodeUtils; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.util.Queries; -import org.elasticsearch.xpack.esql.index.EsIndex; import java.io.IOException; import java.util.List; @@ -44,7 +43,7 @@ public QueryBuilder filter(QueryBuilder sourceQuery) { } } - private final EsIndex index; + private final String indexPattern; private final QueryBuilder query; private final Expression limit; private final List attrs; @@ -52,14 +51,14 @@ public QueryBuilder filter(QueryBuilder sourceQuery) { public EsStatsQueryExec( Source source, - EsIndex index, + String indexPattern, QueryBuilder query, Expression limit, List attributes, List stats ) { super(source); - this.index = index; + this.indexPattern = indexPattern; this.query = query; this.limit = limit; this.attrs = attributes; @@ -78,11 +77,7 @@ public String getWriteableName() { @Override protected NodeInfo info() { - return NodeInfo.create(this, EsStatsQueryExec::new, index, query, limit, attrs, stats); - } - - public EsIndex index() { - return index; + return NodeInfo.create(this, EsStatsQueryExec::new, indexPattern, query, limit, attrs, stats); } public QueryBuilder query() { @@ -113,7 +108,7 @@ public PhysicalPlan estimateRowSize(State state) { @Override public int hashCode() { - return Objects.hash(index, query, 
limit, attrs, stats); + return Objects.hash(indexPattern, query, limit, attrs, stats); } @Override @@ -127,7 +122,7 @@ public boolean equals(Object obj) { } EsStatsQueryExec other = (EsStatsQueryExec) obj; - return Objects.equals(index, other.index) + return Objects.equals(indexPattern, other.indexPattern) && Objects.equals(attrs, other.attrs) && Objects.equals(query, other.query) && Objects.equals(limit, other.limit) @@ -138,7 +133,7 @@ public boolean equals(Object obj) { public String nodeString() { return nodeName() + "[" - + index + + indexPattern + "], stats" + stats + "], query[" diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java deleted file mode 100644 index c185bd5729879..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlExpressionTranslators.java +++ /dev/null @@ -1,598 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.esql.planner; - -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.common.geo.ShapeRelation; -import org.elasticsearch.common.lucene.BytesRefs; -import org.elasticsearch.common.time.DateFormatter; -import org.elasticsearch.geometry.Geometry; -import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.TranslationAware; -import org.elasticsearch.xpack.esql.core.expression.TypedAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslator; -import org.elasticsearch.xpack.esql.core.planner.ExpressionTranslators; -import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; -import org.elasticsearch.xpack.esql.core.querydsl.query.MatchAll; -import org.elasticsearch.xpack.esql.core.querydsl.query.NotQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.core.querydsl.query.QueryStringQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.RangeQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.TermQuery; -import org.elasticsearch.xpack.esql.core.querydsl.query.TermsQuery; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.core.type.MultiTypeEsField; -import org.elasticsearch.xpack.esql.core.util.Check; -import 
org.elasticsearch.xpack.esql.expression.function.fulltext.Kql; -import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; -import org.elasticsearch.xpack.esql.expression.function.fulltext.QueryString; -import org.elasticsearch.xpack.esql.expression.function.fulltext.Term; -import org.elasticsearch.xpack.esql.expression.function.scalar.convert.AbstractConvertFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesFunction; -import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.SpatialRelatesUtils; -import org.elasticsearch.xpack.esql.expression.predicate.fulltext.MultiMatchQueryPredicate; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.InsensitiveEquals; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThanOrEqual; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.NotEquals; -import org.elasticsearch.xpack.esql.querydsl.query.KqlQuery; -import org.elasticsearch.xpack.esql.querydsl.query.MatchQuery; -import org.elasticsearch.xpack.esql.querydsl.query.MultiMatchQuery; -import org.elasticsearch.xpack.esql.querydsl.query.SpatialRelatesQuery; -import org.elasticsearch.xpack.versionfield.Version; - -import java.math.BigDecimal; -import java.math.BigInteger; -import java.time.OffsetTime; -import java.time.ZoneId; -import java.time.ZonedDateTime; -import java.util.ArrayList; -import 
java.util.LinkedHashSet; -import java.util.List; -import java.util.Map; -import java.util.Set; - -import static org.elasticsearch.xpack.esql.core.expression.Foldables.valueOf; -import static org.elasticsearch.xpack.esql.core.planner.ExpressionTranslators.or; -import static org.elasticsearch.xpack.esql.core.type.DataType.IP; -import static org.elasticsearch.xpack.esql.core.type.DataType.UNSIGNED_LONG; -import static org.elasticsearch.xpack.esql.core.type.DataType.VERSION; -import static org.elasticsearch.xpack.esql.core.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.DEFAULT_DATE_TIME_FORMATTER; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.HOUR_MINUTE_SECOND; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.dateTimeToString; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.ipToString; -import static org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter.versionToString; - -public final class EsqlExpressionTranslators { - - public static final List> QUERY_TRANSLATORS = List.of( - new EqualsIgnoreCaseTranslator(), - new BinaryComparisons(), - new SpatialRelatesTranslator(), - new InComparisons(), - new Ranges(), // Create Range in PushFiltersToSource for qualified pushable filters on the same field. 
- new ExpressionTranslators.BinaryLogic(), - new ExpressionTranslators.IsNulls(), - new ExpressionTranslators.IsNotNulls(), - new ExpressionTranslators.Nots(), - new ExpressionTranslators.Likes(), - new MultiMatches(), - new MatchFunctionTranslator(), - new QueryStringFunctionTranslator(), - new KqlFunctionTranslator(), - new TermFunctionTranslator(), - new Scalars() - ); - - public static Query toQuery(Expression e, TranslatorHandler handler) { - if (e instanceof TranslationAware ta) { - return ta.asQuery(handler); - } - Query translation = null; - - for (ExpressionTranslator translator : QUERY_TRANSLATORS) { - translation = translator.translate(e, handler); - if (translation != null) { - return translation; - } - } - - throw new QlIllegalArgumentException("Don't know how to translate {} {}", e.nodeName(), e); - } - - public static class EqualsIgnoreCaseTranslator extends ExpressionTranslator { - - @Override - protected Query asQuery(InsensitiveEquals bc, TranslatorHandler handler) { - return doTranslate(bc, handler); - } - - public static Query doTranslate(InsensitiveEquals bc, TranslatorHandler handler) { - checkInsensitiveComparison(bc); - return handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc)); - } - - public static void checkInsensitiveComparison(InsensitiveEquals bc) { - Check.isTrue( - bc.right().foldable(), - "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", - bc.right().sourceLocation().getLineNumber(), - bc.right().sourceLocation().getColumnNumber(), - Expressions.name(bc.right()), - bc.symbol() - ); - } - - static Query translate(InsensitiveEquals bc) { - TypedAttribute attribute = checkIsPushableAttribute(bc.left()); - Source source = bc.source(); - BytesRef value = BytesRefs.toBytesRef(valueOf(FoldContext.small() /* TODO remove me */, bc.right())); - String name = pushableAttributeName(attribute); - return new TermQuery(source, name, value.utf8ToString(), true); - } - } - - /** - * This class 
is responsible for pushing the ES|QL Binary Comparison operators into Lucene. It covers: - *
      - *
    • {@link Equals}
    • - *
    • {@link NotEquals}
    • - *
    • {@link GreaterThanOrEqual}
    • - *
    • {@link GreaterThan}
    • - *
    • {@link LessThanOrEqual}
    • - *
    • {@link LessThan}
    • - *
    - * - * In general, we are able to push these down when one of the arguments is a constant (i.e. is foldable). This class assumes - * that an earlier pass through the query has rearranged things so that the foldable value will be the right hand side - * input to the operation. - */ - public static class BinaryComparisons extends ExpressionTranslator { - @Override - protected Query asQuery(BinaryComparison bc, TranslatorHandler handler) { - Check.isTrue( - bc.right().foldable(), - "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [{}]", - bc.right().sourceLocation().getLineNumber(), - bc.right().sourceLocation().getColumnNumber(), - Expressions.name(bc.right()), - bc.symbol() - ); - - Query translated = translateOutOfRangeComparisons(bc); - if (translated != null) { - return handler.wrapFunctionQuery(bc, bc.left(), () -> translated); - } - return handler.wrapFunctionQuery(bc, bc.left(), () -> translate(bc, handler)); - } - - static Query translate(BinaryComparison bc, TranslatorHandler handler) { - TypedAttribute attribute = checkIsPushableAttribute(bc.left()); - Source source = bc.source(); - String name = handler.nameOf(attribute); - Object result = bc.right().fold(FoldContext.small() /* TODO remove me */); - Object value = result; - String format = null; - boolean isDateLiteralComparison = false; - - // TODO: This type coersion layer is copied directly from the QL counterpart code. It's probably not necessary or desireable - // in the ESQL version. We should instead do the type conversions using our casting functions. 
- // for a date constant comparison, we need to use a format for the date, to make sure that the format is the same - // no matter the timezone provided by the user - if (value instanceof ZonedDateTime || value instanceof OffsetTime) { - DateFormatter formatter; - if (value instanceof ZonedDateTime) { - formatter = DEFAULT_DATE_TIME_FORMATTER; - // RangeQueryBuilder accepts an Object as its parameter, but it will call .toString() on the ZonedDateTime instance - // which can have a slightly different format depending on the ZoneId used to create the ZonedDateTime - // Since RangeQueryBuilder can handle date as String as well, we'll format it as String and provide the format as well. - value = formatter.format((ZonedDateTime) value); - } else { - formatter = HOUR_MINUTE_SECOND; - value = formatter.format((OffsetTime) value); - } - format = formatter.pattern(); - isDateLiteralComparison = true; - } else if (attribute.dataType() == IP && value instanceof BytesRef bytesRef) { - value = ipToString(bytesRef); - } else if (attribute.dataType() == VERSION) { - // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not - // encoded) representation of the version as it'll do the encoding itself. 
- if (value instanceof BytesRef bytesRef) { - value = versionToString(bytesRef); - } else if (value instanceof Version version) { - value = versionToString(version); - } - } else if (attribute.dataType() == UNSIGNED_LONG && value instanceof Long ul) { - value = unsignedLongAsNumber(ul); - } - - ZoneId zoneId = null; - if (DataType.isDateTime(attribute.dataType())) { - zoneId = bc.zoneId(); - value = dateTimeToString((Long) value); - format = DEFAULT_DATE_TIME_FORMATTER.pattern(); - } - if (bc instanceof GreaterThan) { - return new RangeQuery(source, name, value, false, null, false, format, zoneId); - } - if (bc instanceof GreaterThanOrEqual) { - return new RangeQuery(source, name, value, true, null, false, format, zoneId); - } - if (bc instanceof LessThan) { - return new RangeQuery(source, name, null, false, value, false, format, zoneId); - } - if (bc instanceof LessThanOrEqual) { - return new RangeQuery(source, name, null, false, value, true, format, zoneId); - } - if (bc instanceof Equals || bc instanceof NotEquals) { - name = pushableAttributeName(attribute); - - Query query; - if (isDateLiteralComparison) { - // dates equality uses a range query because it's the one that has a "format" parameter - query = new RangeQuery(source, name, value, true, value, true, format, zoneId); - } else { - query = new TermQuery(source, name, value); - } - if (bc instanceof NotEquals) { - query = new NotQuery(source, query); - } - return query; - } - - throw new QlIllegalArgumentException("Don't know how to translate binary comparison [{}] in [{}]", bc.right().nodeString(), bc); - } - - private static Query translateOutOfRangeComparisons(BinaryComparison bc) { - if ((bc.left() instanceof FieldAttribute) == false - || bc.left().dataType().isNumeric() == false - || bc.right().foldable() == false) { - return null; - } - Source source = bc.source(); - Object value = valueOf(FoldContext.small() /* TODO remove me */, bc.right()); - - // Comparisons with multi-values always return null 
in ESQL. - if (value instanceof List) { - return new MatchAll(source).negate(source); - } - - DataType valueType = bc.right().dataType(); - DataType attributeDataType = bc.left().dataType(); - if (valueType == UNSIGNED_LONG && value instanceof Long ul) { - value = unsignedLongAsNumber(ul); - } - Number num = (Number) value; - if (isInRange(attributeDataType, valueType, num)) { - return null; - } - - if (Double.isNaN(((Number) value).doubleValue())) { - return new MatchAll(source).negate(source); - } - - boolean matchAllOrNone; - if (bc instanceof GreaterThan || bc instanceof GreaterThanOrEqual) { - matchAllOrNone = (num.doubleValue() > 0) == false; - } else if (bc instanceof LessThan || bc instanceof LessThanOrEqual) { - matchAllOrNone = (num.doubleValue() > 0); - } else if (bc instanceof Equals) { - matchAllOrNone = false; - } else if (bc instanceof NotEquals) { - matchAllOrNone = true; - } else { - throw new QlIllegalArgumentException("Unknown binary comparison [{}]", bc); - } - - return matchAllOrNone ? new MatchAll(source) : new MatchAll(source).negate(source); - } - - private static final BigDecimal HALF_FLOAT_MAX = BigDecimal.valueOf(65504); - private static final BigDecimal UNSIGNED_LONG_MAX = BigDecimal.valueOf(2).pow(64).subtract(BigDecimal.ONE); - - private static boolean isInRange(DataType numericFieldDataType, DataType valueDataType, Number value) { - double doubleValue = value.doubleValue(); - if (Double.isNaN(doubleValue) || Double.isInfinite(doubleValue)) { - return false; - } - - BigDecimal decimalValue; - if (value instanceof BigInteger bigIntValue) { - // Unsigned longs may be represented as BigInteger. - decimalValue = new BigDecimal(bigIntValue); - } else { - decimalValue = valueDataType.isRationalNumber() ? BigDecimal.valueOf(doubleValue) : BigDecimal.valueOf(value.longValue()); - } - - // Determine min/max for dataType. Use BigDecimals as doubles will have rounding errors for long/ulong. 
- BigDecimal minValue; - BigDecimal maxValue; - if (numericFieldDataType == DataType.BYTE) { - minValue = BigDecimal.valueOf(Byte.MIN_VALUE); - maxValue = BigDecimal.valueOf(Byte.MAX_VALUE); - } else if (numericFieldDataType == DataType.SHORT) { - minValue = BigDecimal.valueOf(Short.MIN_VALUE); - maxValue = BigDecimal.valueOf(Short.MAX_VALUE); - } else if (numericFieldDataType == DataType.INTEGER) { - minValue = BigDecimal.valueOf(Integer.MIN_VALUE); - maxValue = BigDecimal.valueOf(Integer.MAX_VALUE); - } else if (numericFieldDataType == DataType.LONG) { - minValue = BigDecimal.valueOf(Long.MIN_VALUE); - maxValue = BigDecimal.valueOf(Long.MAX_VALUE); - } else if (numericFieldDataType == DataType.UNSIGNED_LONG) { - minValue = BigDecimal.ZERO; - maxValue = UNSIGNED_LONG_MAX; - } else if (numericFieldDataType == DataType.HALF_FLOAT) { - minValue = HALF_FLOAT_MAX.negate(); - maxValue = HALF_FLOAT_MAX; - } else if (numericFieldDataType == DataType.FLOAT) { - minValue = BigDecimal.valueOf(-Float.MAX_VALUE); - maxValue = BigDecimal.valueOf(Float.MAX_VALUE); - } else if (numericFieldDataType == DataType.DOUBLE || numericFieldDataType == DataType.SCALED_FLOAT) { - // Scaled floats are represented as doubles in ESQL. 
- minValue = BigDecimal.valueOf(-Double.MAX_VALUE); - maxValue = BigDecimal.valueOf(Double.MAX_VALUE); - } else { - throw new QlIllegalArgumentException("Data type [{}] unsupported for numeric range check", numericFieldDataType); - } - - return minValue.compareTo(decimalValue) <= 0 && maxValue.compareTo(decimalValue) >= 0; - } - } - - public static class Scalars extends ExpressionTranslator { - @Override - protected Query asQuery(ScalarFunction f, TranslatorHandler handler) { - return doTranslate(f, handler); - } - - public static Query doTranslate(ScalarFunction f, TranslatorHandler handler) { - if (f instanceof CIDRMatch cm) { - if (cm.ipField() instanceof FieldAttribute fa && Expressions.foldable(cm.matches())) { - String targetFieldName = handler.nameOf(fa.exactAttribute()); - Set set = new LinkedHashSet<>(Expressions.fold(FoldContext.small() /* TODO remove me */, cm.matches())); - - Query query = new TermsQuery(f.source(), targetFieldName, set); - // CIDR_MATCH applies only to single values. 
- return handler.wrapFunctionQuery(f, cm.ipField(), () -> query); - } - } - // TODO we could optimize starts_with as well - - throw new QlIllegalArgumentException("Cannot translate expression:[" + f.sourceText() + "]"); - } - } - - public static class SpatialRelatesTranslator extends ExpressionTranslator { - - @Override - protected Query asQuery(SpatialRelatesFunction bc, TranslatorHandler handler) { - return doTranslate(bc, handler); - } - - public static void checkSpatialRelatesFunction(Expression constantExpression, ShapeRelation queryRelation) { - Check.isTrue( - constantExpression.foldable(), - "Line {}:{}: Comparisons against fields are not (currently) supported; offender [{}] in [ST_{}]", - constantExpression.sourceLocation().getLineNumber(), - constantExpression.sourceLocation().getColumnNumber(), - Expressions.name(constantExpression), - queryRelation - ); - } - - public static Query doTranslate(SpatialRelatesFunction bc, TranslatorHandler handler) { - if (bc.left().foldable()) { - checkSpatialRelatesFunction(bc.left(), bc.queryRelation()); - return translate(bc, handler, bc.right(), bc.left()); - } else { - checkSpatialRelatesFunction(bc.right(), bc.queryRelation()); - return translate(bc, handler, bc.left(), bc.right()); - } - } - - static Query translate( - SpatialRelatesFunction bc, - TranslatorHandler handler, - Expression spatialExpression, - Expression constantExpression - ) { - TypedAttribute attribute = checkIsPushableAttribute(spatialExpression); - String name = handler.nameOf(attribute); - - try { - Geometry shape = SpatialRelatesUtils.makeGeometryFromLiteral(FoldContext.small() /* TODO remove me */, constantExpression); - return new SpatialRelatesQuery(bc.source(), name, bc.queryRelation(), shape, attribute.dataType()); - } catch (IllegalArgumentException e) { - throw new QlIllegalArgumentException(e.getMessage(), e); - } - } - } - - public static class InComparisons extends ExpressionTranslator { - - @Override - protected Query asQuery(In in, 
TranslatorHandler handler) { - return doTranslate(in, handler); - } - - public static Query doTranslate(In in, TranslatorHandler handler) { - return handler.wrapFunctionQuery(in, in.value(), () -> translate(in, handler)); - } - - private static boolean needsTypeSpecificValueHandling(DataType fieldType) { - return DataType.isDateTime(fieldType) || fieldType == IP || fieldType == VERSION || fieldType == UNSIGNED_LONG; - } - - private static Query translate(In in, TranslatorHandler handler) { - TypedAttribute attribute = checkIsPushableAttribute(in.value()); - - Set terms = new LinkedHashSet<>(); - List queries = new ArrayList<>(); - - for (Expression rhs : in.list()) { - if (DataType.isNull(rhs.dataType()) == false) { - if (needsTypeSpecificValueHandling(attribute.dataType())) { - // delegates to BinaryComparisons translator to ensure consistent handling of date and time values - Query query = BinaryComparisons.translate(new Equals(in.source(), in.value(), rhs), handler); - - if (query instanceof TermQuery) { - terms.add(((TermQuery) query).value()); - } else { - queries.add(query); - } - } else { - terms.add(valueOf(FoldContext.small() /* TODO remove me */, rhs)); - } - } - } - - if (terms.isEmpty() == false) { - String fieldName = pushableAttributeName(attribute); - queries.add(new TermsQuery(in.source(), fieldName, terms)); - } - - return queries.stream().reduce((q1, q2) -> or(in.source(), q1, q2)).get(); - } - } - - public static class Ranges extends ExpressionTranslator { - - @Override - protected Query asQuery(Range r, TranslatorHandler handler) { - return doTranslate(r, handler); - } - - public static Query doTranslate(Range r, TranslatorHandler handler) { - return handler.wrapFunctionQuery(r, r.value(), () -> translate(r, handler)); - } - - private static RangeQuery translate(Range r, TranslatorHandler handler) { - Object lower = valueOf(FoldContext.small() /* TODO remove me */, r.lower()); - Object upper = valueOf(FoldContext.small() /* TODO remove me */, 
r.upper()); - String format = null; - - DataType dataType = r.value().dataType(); - if (DataType.isDateTime(dataType) && DataType.isDateTime(r.lower().dataType()) && DataType.isDateTime(r.upper().dataType())) { - lower = dateTimeToString((Long) lower); - upper = dateTimeToString((Long) upper); - format = DEFAULT_DATE_TIME_FORMATTER.pattern(); - } - - if (dataType == IP) { - if (lower instanceof BytesRef bytesRef) { - lower = ipToString(bytesRef); - } - if (upper instanceof BytesRef bytesRef) { - upper = ipToString(bytesRef); - } - } else if (dataType == VERSION) { - // VersionStringFieldMapper#indexedValueForSearch() only accepts as input String or BytesRef with the String (i.e. not - // encoded) representation of the version as it'll do the encoding itself. - if (lower instanceof BytesRef bytesRef) { - lower = versionToString(bytesRef); - } else if (lower instanceof Version version) { - lower = versionToString(version); - } - if (upper instanceof BytesRef bytesRef) { - upper = versionToString(bytesRef); - } else if (upper instanceof Version version) { - upper = versionToString(version); - } - } else if (dataType == UNSIGNED_LONG) { - if (lower instanceof Long ul) { - lower = unsignedLongAsNumber(ul); - } - if (upper instanceof Long ul) { - upper = unsignedLongAsNumber(ul); - } - } - return new RangeQuery( - r.source(), - handler.nameOf(r.value()), - lower, - r.includeLower(), - upper, - r.includeUpper(), - format, - r.zoneId() - ); - } - } - - public static class MultiMatches extends ExpressionTranslator { - - @Override - protected Query asQuery(MultiMatchQueryPredicate q, TranslatorHandler handler) { - return doTranslate(q, handler); - } - - public static Query doTranslate(MultiMatchQueryPredicate q, TranslatorHandler handler) { - return new MultiMatchQuery(q.source(), q.query(), q.fields(), q); - } - } - - public static class MatchFunctionTranslator extends ExpressionTranslator { - @Override - protected Query asQuery(Match match, TranslatorHandler handler) { - 
Expression fieldExpression = match.field(); - // Field may be converted to other data type (field_name :: data_type), so we need to check the original field - if (fieldExpression instanceof AbstractConvertFunction convertFunction) { - fieldExpression = convertFunction.field(); - } - if (fieldExpression instanceof FieldAttribute fieldAttribute) { - String fieldName = fieldAttribute.name(); - if (fieldAttribute.field() instanceof MultiTypeEsField multiTypeEsField) { - // If we have multiple field types, we allow the query to be done, but getting the underlying field name - fieldName = multiTypeEsField.getName(); - } - // Make query lenient so mixed field types can be queried when a field type is incompatible with the value provided - return new MatchQuery(match.source(), fieldName, match.queryAsObject(), Map.of("lenient", "true")); - } - - throw new IllegalArgumentException("Match must have a field attribute as the first argument"); - } - } - - public static class QueryStringFunctionTranslator extends ExpressionTranslator { - @Override - protected Query asQuery(QueryString queryString, TranslatorHandler handler) { - return new QueryStringQuery(queryString.source(), (String) queryString.queryAsObject(), Map.of(), Map.of()); - } - } - - public static class KqlFunctionTranslator extends ExpressionTranslator { - @Override - protected Query asQuery(Kql kqlFunction, TranslatorHandler handler) { - return new KqlQuery(kqlFunction.source(), (String) kqlFunction.queryAsObject()); - } - } - - public static class TermFunctionTranslator extends ExpressionTranslator { - @Override - protected Query asQuery(Term term, TranslatorHandler handler) { - return new TermQuery(term.source(), ((FieldAttribute) term.field()).name(), term.queryAsObject()); - } - } - -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java deleted file mode 
100644 index 6fce6c43f12d4..0000000000000 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java +++ /dev/null @@ -1,57 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.esql.planner; - -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.Expressions; -import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; -import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; -import org.elasticsearch.xpack.esql.core.planner.TranslatorHandler; -import org.elasticsearch.xpack.esql.core.querydsl.query.Query; -import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; - -import java.util.function.Supplier; - -public final class EsqlTranslatorHandler implements TranslatorHandler { - - @Override - public Query asQuery(Expression e) { - return EsqlExpressionTranslators.toQuery(e, this); - } - - @Override - public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { - if (field instanceof FieldAttribute fa) { - if (fa.getExactInfo().hasExact()) { - var exact = fa.exactAttribute(); - if (exact != fa) { - fa = exact; - } - } - // don't wrap is null/is not null with SVQ - Query query = querySupplier.get(); - if ((sf instanceof IsNull || sf instanceof IsNotNull) == false) { - query = new SingleValueQuery(query, fa.name()); - } - return query; - } - if (field 
instanceof MetadataAttribute) { - return querySupplier.get(); // MetadataAttributes are always single valued - } - throw new EsqlIllegalArgumentException("Expected a FieldAttribute or MetadataAttribute but received [" + field + "]"); - } - - @Override - public String nameOf(Expression e) { - return Expressions.name(e); - } -} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index ecd0284c7cb57..00bdf0a019096 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -562,7 +562,7 @@ private PhysicalOperation planLookupJoin(LookupJoinExec join, LocalExecutionPlan if (localSourceExec.indexMode() != IndexMode.LOOKUP) { throw new IllegalArgumentException("can't plan [" + join + "]"); } - Map indicesWithModes = localSourceExec.index().indexNameWithModes(); + Map indicesWithModes = localSourceExec.indexNameWithModes(); if (indicesWithModes.size() != 1) { throw new IllegalArgumentException("can't plan [" + join + "], found more than 1 index"); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index a44eb3bbe75f0..7d6d7d9b3c8c9 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -21,12 +21,12 @@ import org.elasticsearch.xpack.esql.core.expression.AttributeSet; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; import 
org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.Holder; import org.elasticsearch.xpack.esql.core.util.Queries; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalOptimizerContext; import org.elasticsearch.xpack.esql.optimizer.LocalLogicalPlanOptimizer; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; @@ -63,11 +63,10 @@ import static org.elasticsearch.index.mapper.MappedFieldType.FieldExtractPreference.NONE; import static org.elasticsearch.xpack.esql.core.util.Queries.Clause.FILTER; import static org.elasticsearch.xpack.esql.optimizer.rules.physical.local.PushFiltersToSource.canPushToSource; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; public class PlannerUtils { - public static final EsqlTranslatorHandler TRANSLATOR_HANDLER = new EsqlTranslatorHandler(); - public static Tuple breakPlanBetweenCoordinatorAndDataNode(PhysicalPlan plan, Configuration config) { var dataNodePlan = new Holder(); @@ -111,7 +110,7 @@ public static Set planConcreteIndices(PhysicalPlan plan) { return Set.of(); } var indices = new LinkedHashSet(); - forEachFromRelation(plan, relation -> indices.addAll(relation.index().concreteIndices())); + forEachFromRelation(plan, relation -> indices.addAll(relation.concreteIndices())); return indices; } @@ -123,7 +122,7 @@ public static String[] planOriginalIndices(PhysicalPlan plan) { return Strings.EMPTY_ARRAY; } var indices = new LinkedHashSet(); - forEachFromRelation(plan, relation -> indices.addAll(asList(Strings.commaDelimitedListToStringArray(relation.index().name())))); + forEachFromRelation(plan, relation -> indices.addAll(asList(Strings.commaDelimitedListToStringArray(relation.indexPattern())))); return indices.toArray(String[]::new); } @@ -193,7 
+192,14 @@ public static PhysicalPlan localPlan( if (filter != null) { physicalFragment = physicalFragment.transformUp( EsSourceExec.class, - query -> new EsSourceExec(Source.EMPTY, query.index(), query.output(), filter, query.indexMode()) + query -> new EsSourceExec( + Source.EMPTY, + query.indexPattern(), + query.indexMode(), + query.indexNameWithModes(), + query.output(), + filter + ) ); } var localOptimized = physicalOptimizer.localOptimize(physicalFragment); diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TranslatorHandler.java new file mode 100644 index 0000000000000..f7f09d36a4296 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/TranslatorHandler.java @@ -0,0 +1,56 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.planner; + +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.QlIllegalArgumentException; +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.Expressions; +import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; +import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; +import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; + +/** + * Handler used during query translation. 
+ * + * Expressions that need to translate children into queries during own translation should always use this handler, as it provides + * SingleValueQuery-wrapping when necessary. + */ +public final class TranslatorHandler { + + public static final TranslatorHandler TRANSLATOR_HANDLER = new TranslatorHandler(); + + private TranslatorHandler() {} + + public Query asQuery(Expression e) { + if (e instanceof TranslationAware ta) { + Query query = ta.asQuery(this); + return ta instanceof TranslationAware.SingleValueTranslationAware sv ? wrapFunctionQuery(sv.singleValueField(), query) : query; + } + + throw new QlIllegalArgumentException("Don't know how to translate {} {}", e.nodeName(), e); + } + + private static Query wrapFunctionQuery(Expression field, Query query) { + if (field instanceof FieldAttribute fa) { + fa = fa.getExactInfo().hasExact() ? fa.exactAttribute() : fa; + return new SingleValueQuery(query, fa.name()); + } + if (field instanceof MetadataAttribute) { + return query; // MetadataAttributes are always single valued + } + throw new EsqlIllegalArgumentException("Expected a FieldAttribute or MetadataAttribute but received [" + field + "]"); + } + + // TODO: is this method necessary? + public String nameOf(Expression e) { + return Expressions.name(e); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java new file mode 100644 index 0000000000000..1f2b8faf83ee3 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ClusterComputeHandler.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.compute.EsqlRefCountingListener; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.session.Configuration; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executor; + +/** + * Manages computes across multiple clusters by sending {@link ClusterComputeRequest} to remote clusters and executing the computes. + * This handler delegates the execution of computes on data nodes within each remote cluster to {@link DataNodeComputeHandler}. 
+ */ +final class ClusterComputeHandler implements TransportRequestHandler { + private final ComputeService computeService; + private final ExchangeService exchangeService; + private final TransportService transportService; + private final Executor esqlExecutor; + private final DataNodeComputeHandler dataNodeComputeHandler; + + ClusterComputeHandler( + ComputeService computeService, + ExchangeService exchangeService, + TransportService transportService, + Executor esqlExecutor, + DataNodeComputeHandler dataNodeComputeHandler + ) { + this.computeService = computeService; + this.exchangeService = exchangeService; + this.esqlExecutor = esqlExecutor; + this.transportService = transportService; + this.dataNodeComputeHandler = dataNodeComputeHandler; + transportService.registerRequestHandler(ComputeService.CLUSTER_ACTION_NAME, esqlExecutor, ClusterComputeRequest::new, this); + } + + void startComputeOnRemoteClusters( + String sessionId, + CancellableTask rootTask, + Configuration configuration, + PhysicalPlan plan, + ExchangeSourceHandler exchangeSource, + List clusters, + ComputeListener computeListener + ) { + var queryPragmas = configuration.pragmas(); + var linkExchangeListeners = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); + try (EsqlRefCountingListener refs = new EsqlRefCountingListener(linkExchangeListeners)) { + for (RemoteCluster cluster : clusters) { + final var childSessionId = computeService.newChildSession(sessionId); + ExchangeService.openExchange( + transportService, + cluster.connection, + childSessionId, + queryPragmas.exchangeBufferSize(), + esqlExecutor, + refs.acquire().delegateFailureAndWrap((l, unused) -> { + var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); + exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); + var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, 
cluster.originalIndices); + var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); + var clusterListener = ActionListener.runBefore( + computeListener.acquireCompute(cluster.clusterAlias()), + () -> l.onResponse(null) + ); + transportService.sendChildRequest( + cluster.connection, + ComputeService.CLUSTER_ACTION_NAME, + clusterRequest, + rootTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(clusterListener, ComputeResponse::new, esqlExecutor) + ); + }) + ); + } + } + } + + List getRemoteClusters( + Map clusterToConcreteIndices, + Map clusterToOriginalIndices + ) { + List remoteClusters = new ArrayList<>(clusterToConcreteIndices.size()); + RemoteClusterService remoteClusterService = transportService.getRemoteClusterService(); + for (Map.Entry e : clusterToConcreteIndices.entrySet()) { + String clusterAlias = e.getKey(); + OriginalIndices concreteIndices = clusterToConcreteIndices.get(clusterAlias); + OriginalIndices originalIndices = clusterToOriginalIndices.get(clusterAlias); + if (originalIndices == null) { + assert false : "can't find original indices for cluster " + clusterAlias; + throw new IllegalStateException("can't find original indices for cluster " + clusterAlias); + } + if (concreteIndices.indices().length > 0) { + Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); + remoteClusters.add(new RemoteCluster(clusterAlias, connection, concreteIndices.indices(), originalIndices)); + } + } + return remoteClusters; + } + + record RemoteCluster(String clusterAlias, Transport.Connection connection, String[] concreteIndices, OriginalIndices originalIndices) { + + } + + @Override + public void messageReceived(ClusterComputeRequest request, TransportChannel channel, Task task) { + ChannelActionListener listener = new ChannelActionListener<>(channel); + RemoteClusterPlan remoteClusterPlan = request.remoteClusterPlan(); + var plan = 
remoteClusterPlan.plan(); + if (plan instanceof ExchangeSinkExec == false) { + listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + plan)); + return; + } + String clusterAlias = request.clusterAlias(); + /* + * This handler runs only on remote cluster coordinators, so it creates a new local EsqlExecutionInfo object to record + * execution metadata for ES|QL processing local to this cluster. The execution info will be copied into the + * ComputeResponse that is sent back to the primary coordinating cluster. + */ + EsqlExecutionInfo execInfo = new EsqlExecutionInfo(true); + execInfo.swapCluster(clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(clusterAlias, Arrays.toString(request.indices()))); + CancellableTask cancellable = (CancellableTask) task; + try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, listener)) { + runComputeOnRemoteCluster( + clusterAlias, + request.sessionId(), + (CancellableTask) task, + request.configuration(), + (ExchangeSinkExec) plan, + Set.of(remoteClusterPlan.targetIndices()), + remoteClusterPlan.originalIndices(), + execInfo, + computeListener + ); + } + } + + /** + * Performs a compute on a remote cluster. The output pages are placed in an exchange sink specified by + * {@code globalSessionId}. The coordinator on the main cluster will poll pages from there. + *

    + * Currently, the coordinator on the remote cluster polls pages from data nodes within the remote cluster + * and performs cluster-level reduction before sending pages to the querying cluster. This reduction aims + * to minimize data transfers across clusters but may require additional CPU resources for operations like + * aggregations. + */ + void runComputeOnRemoteCluster( + String clusterAlias, + String globalSessionId, + CancellableTask parentTask, + Configuration configuration, + ExchangeSinkExec plan, + Set concreteIndices, + OriginalIndices originalIndices, + EsqlExecutionInfo executionInfo, + ComputeListener computeListener + ) { + final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); + parentTask.addListener( + () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) + ); + final String localSessionId = clusterAlias + ":" + globalSessionId; + final PhysicalPlan coordinatorPlan = ComputeService.reductionPlan(plan, true); + var exchangeSource = new ExchangeSourceHandler( + configuration.pragmas().exchangeBufferSize(), + transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), + computeListener.acquireAvoid() + ); + try (Releasable ignored = exchangeSource.addEmptySink()) { + exchangeSink.addCompletionListener(computeListener.acquireAvoid()); + computeService.runCompute( + parentTask, + new ComputeContext( + localSessionId, + clusterAlias, + List.of(), + configuration, + configuration.newFoldContext(), + exchangeSource, + exchangeSink + ), + coordinatorPlan, + computeListener.acquireCompute(clusterAlias) + ); + dataNodeComputeHandler.startComputeOnDataNodes( + localSessionId, + clusterAlias, + parentTask, + configuration, + plan, + concreteIndices, + originalIndices, + exchangeSource, + executionInfo, + computeListener + ); + } + } + +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java new file mode 100644 index 0000000000000..4e178bb740757 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeContext.java @@ -0,0 +1,31 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.session.Configuration; + +import java.util.List; + +record ComputeContext( + String sessionId, + String clusterAlias, + List searchContexts, + Configuration configuration, + FoldContext foldCtx, + ExchangeSourceHandler exchangeSource, + ExchangeSinkHandler exchangeSink +) { + List searchExecutionContexts() { + return searchContexts.stream().map(SearchContext::getSearchExecutionContext).toList(); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index a38236fe60954..2cb4b49ec3591 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -8,58 +8,31 @@ package org.elasticsearch.xpack.esql.plugin; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionListenerResponseHandler; -import 
org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.search.SearchShardsGroup; -import org.elasticsearch.action.search.SearchShardsRequest; -import org.elasticsearch.action.search.SearchShardsResponse; -import org.elasticsearch.action.support.ChannelActionListener; -import org.elasticsearch.action.support.RefCountingRunnable; -import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.EsqlRefCountingListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeService; -import org.elasticsearch.compute.operator.exchange.ExchangeSink; -import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; -import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Tuple; -import org.elasticsearch.index.Index; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; -import org.elasticsearch.index.shard.IndexShard; -import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.index.shard.ShardNotFoundException; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import org.elasticsearch.search.SearchService; -import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.SearchContext; -import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.search.lookup.SourceProvider; import 
org.elasticsearch.tasks.CancellableTask; -import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterAware; -import org.elasticsearch.transport.RemoteClusterService; -import org.elasticsearch.transport.Transport; -import org.elasticsearch.transport.TransportChannel; -import org.elasticsearch.transport.TransportRequestHandler; -import org.elasticsearch.transport.TransportRequestOptions; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; -import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; @@ -75,14 +48,10 @@ import org.elasticsearch.xpack.esql.session.Result; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; -import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; -import java.util.concurrent.Executor; -import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; @@ -92,20 +61,24 @@ * Computes the result of a {@link PhysicalPlan}. 
*/ public class ComputeService { + public static final String DATA_ACTION_NAME = EsqlQueryAction.NAME + "/data"; + public static final String CLUSTER_ACTION_NAME = EsqlQueryAction.NAME + "/cluster"; + private static final Logger LOGGER = LogManager.getLogger(ComputeService.class); private final SearchService searchService; private final BigArrays bigArrays; private final BlockFactory blockFactory; private final TransportService transportService; - private final Executor esqlExecutor; private final DriverTaskRunner driverRunner; - private final ExchangeService exchangeService; private final EnrichLookupService enrichLookupService; private final LookupFromIndexService lookupFromIndexService; private final ClusterService clusterService; private final AtomicLong childSessionIdGenerator = new AtomicLong(); + private final DataNodeComputeHandler dataNodeComputeHandler; + private final ClusterComputeHandler clusterComputeHandler; + @SuppressWarnings("this-escape") public ComputeService( SearchService searchService, TransportService transportService, @@ -121,19 +94,19 @@ public ComputeService( this.transportService = transportService; this.bigArrays = bigArrays.withCircuitBreaking(); this.blockFactory = blockFactory; - this.esqlExecutor = threadPool.executor(ThreadPool.Names.SEARCH); - transportService.registerRequestHandler(DATA_ACTION_NAME, this.esqlExecutor, DataNodeRequest::new, new DataNodeRequestHandler()); - transportService.registerRequestHandler( - CLUSTER_ACTION_NAME, - this.esqlExecutor, - ClusterComputeRequest::new, - new ClusterRequestHandler() - ); - this.driverRunner = new DriverTaskRunner(transportService, this.esqlExecutor); - this.exchangeService = exchangeService; + var esqlExecutor = threadPool.executor(ThreadPool.Names.SEARCH); + this.driverRunner = new DriverTaskRunner(transportService, esqlExecutor); this.enrichLookupService = enrichLookupService; this.lookupFromIndexService = lookupFromIndexService; this.clusterService = clusterService; + 
this.dataNodeComputeHandler = new DataNodeComputeHandler(this, searchService, transportService, exchangeService, esqlExecutor); + this.clusterComputeHandler = new ClusterComputeHandler( + this, + exchangeService, + transportService, + esqlExecutor, + dataNodeComputeHandler + ); } public void execute( @@ -238,7 +211,7 @@ public void execute( ); // starts computes on data nodes on the main cluster if (localConcreteIndices != null && localConcreteIndices.indices().length > 0) { - startComputeOnDataNodes( + dataNodeComputeHandler.startComputeOnDataNodes( sessionId, RemoteClusterAware.LOCAL_CLUSTER_GROUP_KEY, rootTask, @@ -252,13 +225,14 @@ public void execute( ); } // starts computes on remote clusters - startComputeOnRemoteClusters( + final var remoteClusters = clusterComputeHandler.getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices); + clusterComputeHandler.startComputeOnRemoteClusters( sessionId, rootTask, configuration, dataNodePlan, exchangeSource, - getRemoteClusters(clusterToConcreteIndices, clusterToOriginalIndices), + remoteClusters, computeListener ); } @@ -298,145 +272,11 @@ private static void updateExecutionInfoAfterCoordinatorOnlyQuery(EsqlExecutionIn } } - private List getRemoteClusters( - Map clusterToConcreteIndices, - Map clusterToOriginalIndices - ) { - List remoteClusters = new ArrayList<>(clusterToConcreteIndices.size()); - RemoteClusterService remoteClusterService = transportService.getRemoteClusterService(); - for (Map.Entry e : clusterToConcreteIndices.entrySet()) { - String clusterAlias = e.getKey(); - OriginalIndices concreteIndices = clusterToConcreteIndices.get(clusterAlias); - OriginalIndices originalIndices = clusterToOriginalIndices.get(clusterAlias); - if (originalIndices == null) { - assert false : "can't find original indices for cluster " + clusterAlias; - throw new IllegalStateException("can't find original indices for cluster " + clusterAlias); - } - if (concreteIndices.indices().length > 0) { - 
Transport.Connection connection = remoteClusterService.getConnection(clusterAlias); - remoteClusters.add(new RemoteCluster(clusterAlias, connection, concreteIndices.indices(), originalIndices)); - } - } - return remoteClusters; - } - - private void startComputeOnDataNodes( - String sessionId, - String clusterAlias, - CancellableTask parentTask, - Configuration configuration, - PhysicalPlan dataNodePlan, - Set concreteIndices, - OriginalIndices originalIndices, - ExchangeSourceHandler exchangeSource, - EsqlExecutionInfo executionInfo, - ComputeListener computeListener - ) { - QueryBuilder requestFilter = PlannerUtils.requestTimestampFilter(dataNodePlan); - var lookupListener = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); - // SearchShards API can_match is done in lookupDataNodes - lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodeResult -> { - try (EsqlRefCountingListener refs = new EsqlRefCountingListener(lookupListener)) { - // update ExecutionInfo with shard counts (total and skipped) - executionInfo.swapCluster( - clusterAlias, - (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(dataNodeResult.totalShards()) - // do not set successful or failed shard count here - do it when search is done - .setSkippedShards(dataNodeResult.skippedShards()) - .build() - ); - - // For each target node, first open a remote exchange on the remote node, then link the exchange source to - // the new remote exchange sink, and initialize the computation on the target node via data-node-request. 
- for (DataNode node : dataNodeResult.dataNodes()) { - var queryPragmas = configuration.pragmas(); - var childSessionId = newChildSession(sessionId); - ExchangeService.openExchange( - transportService, - node.connection, - childSessionId, - queryPragmas.exchangeBufferSize(), - esqlExecutor, - refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); - exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); - ActionListener computeResponseListener = computeListener.acquireCompute(clusterAlias); - var dataNodeListener = ActionListener.runBefore(computeResponseListener, () -> l.onResponse(null)); - final boolean sameNode = transportService.getLocalNode().getId().equals(node.connection.getNode().getId()); - var dataNodeRequest = new DataNodeRequest( - childSessionId, - configuration, - clusterAlias, - node.shardIds, - node.aliasFilters, - dataNodePlan, - originalIndices.indices(), - originalIndices.indicesOptions(), - sameNode == false && queryPragmas.nodeLevelReduction() - ); - transportService.sendChildRequest( - node.connection, - DATA_ACTION_NAME, - dataNodeRequest, - parentTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(dataNodeListener, ComputeResponse::new, esqlExecutor) - ); - }) - ); - } - } - }, lookupListener::onFailure)); - } - - private void startComputeOnRemoteClusters( - String sessionId, - CancellableTask rootTask, - Configuration configuration, - PhysicalPlan plan, - ExchangeSourceHandler exchangeSource, - List clusters, - ComputeListener computeListener - ) { - var queryPragmas = configuration.pragmas(); - var linkExchangeListeners = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); - try (EsqlRefCountingListener refs = new EsqlRefCountingListener(linkExchangeListeners)) { - for (RemoteCluster cluster : clusters) { - 
final var childSessionId = newChildSession(sessionId); - ExchangeService.openExchange( - transportService, - cluster.connection, - childSessionId, - queryPragmas.exchangeBufferSize(), - esqlExecutor, - refs.acquire().delegateFailureAndWrap((l, unused) -> { - var remoteSink = exchangeService.newRemoteSink(rootTask, childSessionId, transportService, cluster.connection); - exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); - var remotePlan = new RemoteClusterPlan(plan, cluster.concreteIndices, cluster.originalIndices); - var clusterRequest = new ClusterComputeRequest(cluster.clusterAlias, childSessionId, configuration, remotePlan); - var clusterListener = ActionListener.runBefore( - computeListener.acquireCompute(cluster.clusterAlias()), - () -> l.onResponse(null) - ); - transportService.sendChildRequest( - cluster.connection, - CLUSTER_ACTION_NAME, - clusterRequest, - rootTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(clusterListener, ComputeResponse::new, esqlExecutor) - ); - }) - ); - } - } - } - void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, ActionListener listener) { - listener = ActionListener.runBefore(listener, () -> Releasables.close(context.searchContexts)); - List contexts = new ArrayList<>(context.searchContexts.size()); - for (int i = 0; i < context.searchContexts.size(); i++) { - SearchContext searchContext = context.searchContexts.get(i); + listener = ActionListener.runBefore(listener, () -> Releasables.close(context.searchContexts())); + List contexts = new ArrayList<>(context.searchContexts().size()); + for (int i = 0; i < context.searchContexts().size(); i++) { + SearchContext searchContext = context.searchContexts().get(i); var searchExecutionContext = new SearchExecutionContext(searchContext.getSearchExecutionContext()) { @Override @@ -453,13 +293,13 @@ public SourceProvider createSourceProvider() { final List drivers; try { 
LocalExecutionPlanner planner = new LocalExecutionPlanner( - context.sessionId, - context.clusterAlias, + context.sessionId(), + context.clusterAlias(), task, bigArrays, blockFactory, clusterService.getSettings(), - context.configuration, + context.configuration(), context.exchangeSource(), context.exchangeSink(), enrichLookupService, @@ -469,7 +309,7 @@ public SourceProvider createSourceProvider() { LOGGER.debug("Received physical plan:\n{}", plan); - plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration, context.foldCtx(), plan); + plan = PlannerUtils.localPlan(context.searchExecutionContexts(), context.configuration(), context.foldCtx(), plan); // the planner will also set the driver parallelism in LocalExecutionPlanner.LocalExecutionPlan (used down below) // it's doing this in the planning of EsQueryExec (the source of the data) // see also EsPhysicalOperationProviders.sourcePhysicalOperation @@ -477,7 +317,7 @@ public SourceProvider createSourceProvider() { if (LOGGER.isDebugEnabled()) { LOGGER.debug("Local execution plan:\n{}", localExecutionPlan.describe()); } - drivers = localExecutionPlan.createDrivers(context.sessionId); + drivers = localExecutionPlan.createDrivers(context.sessionId()); if (drivers.isEmpty()) { throw new IllegalStateException("no drivers created"); } @@ -487,7 +327,7 @@ public SourceProvider createSourceProvider() { return; } ActionListener listenerCollectingStatus = listener.map(ignored -> { - if (context.configuration.profile()) { + if (context.configuration().profile()) { return new ComputeResponse(drivers.stream().map(Driver::profile).toList()); } else { final ComputeResponse response = new ComputeResponse(List.of()); @@ -503,306 +343,7 @@ public SourceProvider createSourceProvider() { ); } - private void acquireSearchContexts( - String clusterAlias, - List shardIds, - Configuration configuration, - Map aliasFilters, - ActionListener> listener - ) { - final List targetShards = new ArrayList<>(); - try 
{ - for (ShardId shardId : shardIds) { - var indexShard = searchService.getIndicesService().indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); - targetShards.add(indexShard); - } - } catch (Exception e) { - listener.onFailure(e); - return; - } - final var doAcquire = ActionRunnable.supply(listener, () -> { - final List searchContexts = new ArrayList<>(targetShards.size()); - boolean success = false; - try { - for (IndexShard shard : targetShards) { - var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); - var shardRequest = new ShardSearchRequest( - shard.shardId(), - configuration.absoluteStartedTimeInMillis(), - aliasFilter, - clusterAlias - ); - // TODO: `searchService.createSearchContext` allows opening search contexts without limits, - // we need to limit the number of active search contexts here or in SearchService - SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); - searchContexts.add(context); - } - for (SearchContext searchContext : searchContexts) { - searchContext.preProcess(); - } - success = true; - return searchContexts; - } finally { - if (success == false) { - IOUtils.close(searchContexts); - } - } - }); - final AtomicBoolean waitedForRefreshes = new AtomicBoolean(); - try (RefCountingRunnable refs = new RefCountingRunnable(() -> { - if (waitedForRefreshes.get()) { - esqlExecutor.execute(doAcquire); - } else { - doAcquire.run(); - } - })) { - for (IndexShard targetShard : targetShards) { - final Releasable ref = refs.acquire(); - targetShard.ensureShardSearchActive(await -> { - try (ref) { - if (await) { - waitedForRefreshes.set(true); - } - } - }); - } - } - } - - record DataNode(Transport.Connection connection, List shardIds, Map aliasFilters) { - - } - - /** - * Result from lookupDataNodes where can_match is performed to determine what shards can be skipped - * and which target nodes are needed for running the ES|QL query - * - * @param dataNodes 
list of DataNode to perform the ES|QL query on - * @param totalShards Total number of shards (from can_match phase), including skipped shards - * @param skippedShards Number of skipped shards (from can_match phase) - */ - record DataNodeResult(List dataNodes, int totalShards, int skippedShards) {} - - record RemoteCluster(String clusterAlias, Transport.Connection connection, String[] concreteIndices, OriginalIndices originalIndices) { - - } - - /** - * Performs can_match and find the target nodes for the given target indices and filter. - *

    - * Ideally, the search_shards API should be called before the field-caps API; however, this can lead - * to a situation where the column structure (i.e., matched data types) differs depending on the query. - */ - private void lookupDataNodes( - Task parentTask, - String clusterAlias, - QueryBuilder filter, - Set concreteIndices, - OriginalIndices originalIndices, - ActionListener listener - ) { - ActionListener searchShardsListener = listener.map(resp -> { - Map nodes = new HashMap<>(); - for (DiscoveryNode node : resp.getNodes()) { - nodes.put(node.getId(), node); - } - Map> nodeToShards = new HashMap<>(); - Map> nodeToAliasFilters = new HashMap<>(); - int totalShards = 0; - int skippedShards = 0; - for (SearchShardsGroup group : resp.getGroups()) { - var shardId = group.shardId(); - if (group.allocatedNodes().isEmpty()) { - throw new ShardNotFoundException(group.shardId(), "no shard copies found {}", group.shardId()); - } - if (concreteIndices.contains(shardId.getIndexName()) == false) { - continue; - } - totalShards++; - if (group.skipped()) { - skippedShards++; - continue; - } - String targetNode = group.allocatedNodes().get(0); - nodeToShards.computeIfAbsent(targetNode, k -> new ArrayList<>()).add(shardId); - AliasFilter aliasFilter = resp.getAliasFilters().get(shardId.getIndex().getUUID()); - if (aliasFilter != null) { - nodeToAliasFilters.computeIfAbsent(targetNode, k -> new HashMap<>()).put(shardId.getIndex(), aliasFilter); - } - } - List dataNodes = new ArrayList<>(nodeToShards.size()); - for (Map.Entry> e : nodeToShards.entrySet()) { - DiscoveryNode node = nodes.get(e.getKey()); - Map aliasFilters = nodeToAliasFilters.getOrDefault(e.getKey(), Map.of()); - dataNodes.add(new DataNode(transportService.getConnection(node), e.getValue(), aliasFilters)); - } - return new DataNodeResult(dataNodes, totalShards, skippedShards); - }); - SearchShardsRequest searchShardsRequest = new SearchShardsRequest( - originalIndices.indices(), - 
originalIndices.indicesOptions(), - filter, - null, - null, - false, - clusterAlias - ); - transportService.sendChildRequest( - transportService.getLocalNode(), - EsqlSearchShardsAction.TYPE.name(), - searchShardsRequest, - parentTask, - TransportRequestOptions.EMPTY, - new ActionListenerResponseHandler<>(searchShardsListener, SearchShardsResponse::new, esqlExecutor) - ); - } - - // TODO: Use an internal action here - public static final String DATA_ACTION_NAME = EsqlQueryAction.NAME + "/data"; - - private class DataNodeRequestExecutor { - private final DataNodeRequest request; - private final CancellableTask parentTask; - private final ExchangeSinkHandler exchangeSink; - private final ComputeListener computeListener; - private final int maxConcurrentShards; - private final ExchangeSink blockingSink; // block until we have completed on all shards or the coordinator has enough data - - DataNodeRequestExecutor( - DataNodeRequest request, - CancellableTask parentTask, - ExchangeSinkHandler exchangeSink, - int maxConcurrentShards, - ComputeListener computeListener - ) { - this.request = request; - this.parentTask = parentTask; - this.exchangeSink = exchangeSink; - this.computeListener = computeListener; - this.maxConcurrentShards = maxConcurrentShards; - this.blockingSink = exchangeSink.createExchangeSink(); - } - - void start() { - parentTask.addListener( - () -> exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(parentTask.getReasonCancelled())) - ); - runBatch(0); - } - - private void runBatch(int startBatchIndex) { - final Configuration configuration = request.configuration(); - final String clusterAlias = request.clusterAlias(); - final var sessionId = request.sessionId(); - final int endBatchIndex = Math.min(startBatchIndex + maxConcurrentShards, request.shardIds().size()); - List shardIds = request.shardIds().subList(startBatchIndex, endBatchIndex); - ActionListener batchListener = new ActionListener<>() { - final ActionListener 
ref = computeListener.acquireCompute(); - - @Override - public void onResponse(ComputeResponse result) { - try { - onBatchCompleted(endBatchIndex); - } finally { - ref.onResponse(result); - } - } - - @Override - public void onFailure(Exception e) { - try { - exchangeService.finishSinkHandler(request.sessionId(), e); - } finally { - ref.onFailure(e); - } - } - }; - acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); - var computeContext = new ComputeContext( - sessionId, - clusterAlias, - searchContexts, - configuration, - configuration.newFoldContext(), - null, - exchangeSink - ); - runCompute(parentTask, computeContext, request.plan(), batchListener); - }, batchListener::onFailure)); - } - - private void onBatchCompleted(int lastBatchIndex) { - if (lastBatchIndex < request.shardIds().size() && exchangeSink.isFinished() == false) { - runBatch(lastBatchIndex); - } else { - // don't return until all pages are fetched - var completionListener = computeListener.acquireAvoid(); - exchangeSink.addCompletionListener( - ActionListener.runAfter(completionListener, () -> exchangeService.finishSinkHandler(request.sessionId(), null)) - ); - blockingSink.finish(); - } - } - } - - private void runComputeOnDataNode( - CancellableTask task, - String externalId, - PhysicalPlan reducePlan, - DataNodeRequest request, - ComputeListener computeListener - ) { - var parentListener = computeListener.acquireAvoid(); - try { - // run compute with target shards - var internalSink = exchangeService.createSinkHandler(request.sessionId(), request.pragmas().exchangeBufferSize()); - DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( - request, - task, - internalSink, - request.configuration().pragmas().maxConcurrentShardsPerNode(), - computeListener - ); - dataNodeRequestExecutor.start(); - // run 
the node-level reduction - var externalSink = exchangeService.getSinkHandler(externalId); - task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); - var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); - exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, 1, ActionListener.noop()); - ActionListener reductionListener = computeListener.acquireCompute(); - runCompute( - task, - new ComputeContext( - request.sessionId(), - request.clusterAlias(), - List.of(), - request.configuration(), - new FoldContext(request.pragmas().foldLimit().getBytes()), - exchangeSource, - externalSink - ), - reducePlan, - ActionListener.wrap(resp -> { - // don't return until all pages are fetched - externalSink.addCompletionListener(ActionListener.running(() -> { - exchangeService.finishSinkHandler(externalId, null); - reductionListener.onResponse(resp); - })); - }, e -> { - exchangeService.finishSinkHandler(externalId, e); - reductionListener.onFailure(e); - }) - ); - parentListener.onResponse(null); - } catch (Exception e) { - exchangeService.finishSinkHandler(externalId, e); - exchangeService.finishSinkHandler(request.sessionId(), e); - parentListener.onFailure(e); - } - } - - private static PhysicalPlan reductionPlan(ExchangeSinkExec plan, boolean enable) { + static PhysicalPlan reductionPlan(ExchangeSinkExec plan, boolean enable) { PhysicalPlan reducePlan = new ExchangeSourceExec(plan.source(), plan.output(), plan.isIntermediateAgg()); if (enable) { PhysicalPlan p = PlannerUtils.reductionPlan(plan); @@ -813,149 +354,7 @@ private static PhysicalPlan reductionPlan(ExchangeSinkExec plan, boolean enable) return new ExchangeSinkExec(plan.source(), plan.output(), plan.isIntermediateAgg(), reducePlan); } - private class DataNodeRequestHandler implements TransportRequestHandler { - @Override - public void messageReceived(DataNodeRequest request, TransportChannel 
channel, Task task) { - final ActionListener listener = new ChannelActionListener<>(channel); - final PhysicalPlan reductionPlan; - if (request.plan() instanceof ExchangeSinkExec plan) { - reductionPlan = reductionPlan(plan, request.runNodeLevelReduction()); - } else { - listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); - return; - } - final String sessionId = request.sessionId(); - request = new DataNodeRequest( - sessionId + "[n]", // internal session - request.configuration(), - request.clusterAlias(), - request.shardIds(), - request.aliasFilters(), - request.plan(), - request.indices(), - request.indicesOptions(), - request.runNodeLevelReduction() - ); - try (var computeListener = ComputeListener.create(transportService, (CancellableTask) task, listener)) { - runComputeOnDataNode((CancellableTask) task, sessionId, reductionPlan, request, computeListener); - } - } - } - - public static final String CLUSTER_ACTION_NAME = EsqlQueryAction.NAME + "/cluster"; - - private class ClusterRequestHandler implements TransportRequestHandler { - @Override - public void messageReceived(ClusterComputeRequest request, TransportChannel channel, Task task) { - ChannelActionListener listener = new ChannelActionListener<>(channel); - RemoteClusterPlan remoteClusterPlan = request.remoteClusterPlan(); - var plan = remoteClusterPlan.plan(); - if (plan instanceof ExchangeSinkExec == false) { - listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + plan)); - return; - } - String clusterAlias = request.clusterAlias(); - /* - * This handler runs only on remote cluster coordinators, so it creates a new local EsqlExecutionInfo object to record - * execution metadata for ES|QL processing local to this cluster. The execution info will be copied into the - * ComputeResponse that is sent back to the primary coordinating cluster. 
- */ - EsqlExecutionInfo execInfo = new EsqlExecutionInfo(true); - execInfo.swapCluster(clusterAlias, (k, v) -> new EsqlExecutionInfo.Cluster(clusterAlias, Arrays.toString(request.indices()))); - CancellableTask cancellable = (CancellableTask) task; - try (var computeListener = ComputeListener.create(clusterAlias, transportService, cancellable, execInfo, listener)) { - runComputeOnRemoteCluster( - clusterAlias, - request.sessionId(), - (CancellableTask) task, - request.configuration(), - (ExchangeSinkExec) plan, - Set.of(remoteClusterPlan.targetIndices()), - remoteClusterPlan.originalIndices(), - execInfo, - computeListener - ); - } - } - } - - /** - * Performs a compute on a remote cluster. The output pages are placed in an exchange sink specified by - * {@code globalSessionId}. The coordinator on the main cluster will poll pages from there. - *

    - * Currently, the coordinator on the remote cluster polls pages from data nodes within the remote cluster - * and performs cluster-level reduction before sending pages to the querying cluster. This reduction aims - * to minimize data transfers across clusters but may require additional CPU resources for operations like - * aggregations. - */ - void runComputeOnRemoteCluster( - String clusterAlias, - String globalSessionId, - CancellableTask parentTask, - Configuration configuration, - ExchangeSinkExec plan, - Set concreteIndices, - OriginalIndices originalIndices, - EsqlExecutionInfo executionInfo, - ComputeListener computeListener - ) { - final var exchangeSink = exchangeService.getSinkHandler(globalSessionId); - parentTask.addListener( - () -> exchangeService.finishSinkHandler(globalSessionId, new TaskCancelledException(parentTask.getReasonCancelled())) - ); - final String localSessionId = clusterAlias + ":" + globalSessionId; - final PhysicalPlan coordinatorPlan = reductionPlan(plan, true); - var exchangeSource = new ExchangeSourceHandler( - configuration.pragmas().exchangeBufferSize(), - transportService.getThreadPool().executor(ThreadPool.Names.SEARCH), - computeListener.acquireAvoid() - ); - try (Releasable ignored = exchangeSource.addEmptySink()) { - exchangeSink.addCompletionListener(computeListener.acquireAvoid()); - runCompute( - parentTask, - new ComputeContext( - localSessionId, - clusterAlias, - List.of(), - configuration, - configuration.newFoldContext(), - exchangeSource, - exchangeSink - ), - coordinatorPlan, - computeListener.acquireCompute(clusterAlias) - ); - startComputeOnDataNodes( - localSessionId, - clusterAlias, - parentTask, - configuration, - plan, - concreteIndices, - originalIndices, - exchangeSource, - executionInfo, - computeListener - ); - } - } - - record ComputeContext( - String sessionId, - String clusterAlias, - List searchContexts, - Configuration configuration, - FoldContext foldCtx, - ExchangeSourceHandler exchangeSource, 
- ExchangeSinkHandler exchangeSink - ) { - public List searchExecutionContexts() { - return searchContexts.stream().map(ctx -> ctx.getSearchExecutionContext()).toList(); - } - } - - private String newChildSession(String session) { + String newChildSession(String session) { return session + "/" + childSessionIdGenerator.incrementAndGet(); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java new file mode 100644 index 0000000000000..1a1e5726a487b --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/DataNodeComputeHandler.java @@ -0,0 +1,476 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.ActionRunnable; +import org.elasticsearch.action.OriginalIndices; +import org.elasticsearch.action.search.SearchShardsGroup; +import org.elasticsearch.action.search.SearchShardsRequest; +import org.elasticsearch.action.search.SearchShardsResponse; +import org.elasticsearch.action.support.ChannelActionListener; +import org.elasticsearch.action.support.RefCountingRunnable; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.compute.EsqlRefCountingListener; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.compute.operator.exchange.ExchangeSink; +import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; +import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import 
org.elasticsearch.core.IOUtils; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.shard.IndexShard; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.index.shard.ShardNotFoundException; +import org.elasticsearch.search.SearchService; +import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.SearchContext; +import org.elasticsearch.search.internal.ShardSearchRequest; +import org.elasticsearch.tasks.CancellableTask; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.tasks.TaskCancelledException; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.Transport; +import org.elasticsearch.transport.TransportChannel; +import org.elasticsearch.transport.TransportRequestHandler; +import org.elasticsearch.transport.TransportRequestOptions; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; +import org.elasticsearch.xpack.esql.action.EsqlSearchShardsAction; +import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.plan.physical.ExchangeSinkExec; +import org.elasticsearch.xpack.esql.plan.physical.PhysicalPlan; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; +import org.elasticsearch.xpack.esql.session.Configuration; + +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Executor; +import java.util.concurrent.atomic.AtomicBoolean; + +import static org.elasticsearch.xpack.esql.plugin.EsqlPlugin.ESQL_WORKER_THREAD_POOL_NAME; + +/** + * Handles computes within a single cluster by dispatching {@link DataNodeRequest} to data nodes + * and executing these computes on the data nodes. 
+ */ +final class DataNodeComputeHandler implements TransportRequestHandler { + private final ComputeService computeService; + private final SearchService searchService; + private final TransportService transportService; + private final ExchangeService exchangeService; + private final Executor esqlExecutor; + + DataNodeComputeHandler( + ComputeService computeService, + SearchService searchService, + TransportService transportService, + ExchangeService exchangeService, + Executor esqlExecutor + ) { + this.computeService = computeService; + this.searchService = searchService; + this.transportService = transportService; + this.exchangeService = exchangeService; + this.esqlExecutor = esqlExecutor; + transportService.registerRequestHandler(ComputeService.DATA_ACTION_NAME, esqlExecutor, DataNodeRequest::new, this); + } + + void startComputeOnDataNodes( + String sessionId, + String clusterAlias, + CancellableTask parentTask, + Configuration configuration, + PhysicalPlan dataNodePlan, + Set concreteIndices, + OriginalIndices originalIndices, + ExchangeSourceHandler exchangeSource, + EsqlExecutionInfo executionInfo, + ComputeListener computeListener + ) { + QueryBuilder requestFilter = PlannerUtils.requestTimestampFilter(dataNodePlan); + var lookupListener = ActionListener.releaseAfter(computeListener.acquireAvoid(), exchangeSource.addEmptySink()); + // SearchShards API can_match is done in lookupDataNodes + lookupDataNodes(parentTask, clusterAlias, requestFilter, concreteIndices, originalIndices, ActionListener.wrap(dataNodeResult -> { + try (EsqlRefCountingListener refs = new EsqlRefCountingListener(lookupListener)) { + // update ExecutionInfo with shard counts (total and skipped) + executionInfo.swapCluster( + clusterAlias, + (k, v) -> new EsqlExecutionInfo.Cluster.Builder(v).setTotalShards(dataNodeResult.totalShards()) + // do not set successful or failed shard count here - do it when search is done + .setSkippedShards(dataNodeResult.skippedShards()) + .build() + ); + + 
// For each target node, first open a remote exchange on the remote node, then link the exchange source to + // the new remote exchange sink, and initialize the computation on the target node via data-node-request. + for (DataNode node : dataNodeResult.dataNodes()) { + var queryPragmas = configuration.pragmas(); + var childSessionId = computeService.newChildSession(sessionId); + ExchangeService.openExchange( + transportService, + node.connection, + childSessionId, + queryPragmas.exchangeBufferSize(), + esqlExecutor, + refs.acquire().delegateFailureAndWrap((l, unused) -> { + var remoteSink = exchangeService.newRemoteSink(parentTask, childSessionId, transportService, node.connection); + exchangeSource.addRemoteSink(remoteSink, true, queryPragmas.concurrentExchangeClients(), ActionListener.noop()); + ActionListener computeResponseListener = computeListener.acquireCompute(clusterAlias); + var dataNodeListener = ActionListener.runBefore(computeResponseListener, () -> l.onResponse(null)); + final boolean sameNode = transportService.getLocalNode().getId().equals(node.connection.getNode().getId()); + var dataNodeRequest = new DataNodeRequest( + childSessionId, + configuration, + clusterAlias, + node.shardIds, + node.aliasFilters, + dataNodePlan, + originalIndices.indices(), + originalIndices.indicesOptions(), + sameNode == false && queryPragmas.nodeLevelReduction() + ); + transportService.sendChildRequest( + node.connection, + ComputeService.DATA_ACTION_NAME, + dataNodeRequest, + parentTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(dataNodeListener, ComputeResponse::new, esqlExecutor) + ); + }) + ); + } + } + }, lookupListener::onFailure)); + } + + private void acquireSearchContexts( + String clusterAlias, + List shardIds, + Configuration configuration, + Map aliasFilters, + ActionListener> listener + ) { + final List targetShards = new ArrayList<>(); + try { + for (ShardId shardId : shardIds) { + var indexShard = 
searchService.getIndicesService().indexServiceSafe(shardId.getIndex()).getShard(shardId.id()); + targetShards.add(indexShard); + } + } catch (Exception e) { + listener.onFailure(e); + return; + } + final var doAcquire = ActionRunnable.supply(listener, () -> { + final List searchContexts = new ArrayList<>(targetShards.size()); + boolean success = false; + try { + for (IndexShard shard : targetShards) { + var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); + var shardRequest = new ShardSearchRequest( + shard.shardId(), + configuration.absoluteStartedTimeInMillis(), + aliasFilter, + clusterAlias + ); + // TODO: `searchService.createSearchContext` allows opening search contexts without limits, + // we need to limit the number of active search contexts here or in SearchService + SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); + searchContexts.add(context); + } + for (SearchContext searchContext : searchContexts) { + searchContext.preProcess(); + } + success = true; + return searchContexts; + } finally { + if (success == false) { + IOUtils.close(searchContexts); + } + } + }); + final AtomicBoolean waitedForRefreshes = new AtomicBoolean(); + try (RefCountingRunnable refs = new RefCountingRunnable(() -> { + if (waitedForRefreshes.get()) { + esqlExecutor.execute(doAcquire); + } else { + doAcquire.run(); + } + })) { + for (IndexShard targetShard : targetShards) { + final Releasable ref = refs.acquire(); + targetShard.ensureShardSearchActive(await -> { + try (ref) { + if (await) { + waitedForRefreshes.set(true); + } + } + }); + } + } + } + + record DataNode(Transport.Connection connection, List shardIds, Map aliasFilters) { + + } + + /** + * Result from lookupDataNodes where can_match is performed to determine what shards can be skipped + * and which target nodes are needed for running the ES|QL query + * + * @param dataNodes list of DataNode to perform the ES|QL query on + * @param 
totalShards Total number of shards (from can_match phase), including skipped shards + * @param skippedShards Number of skipped shards (from can_match phase) + */ + record DataNodeResult(List dataNodes, int totalShards, int skippedShards) {} + + /** + * Performs can_match and find the target nodes for the given target indices and filter. + *

    + * Ideally, the search_shards API should be called before the field-caps API; however, this can lead + * to a situation where the column structure (i.e., matched data types) differs depending on the query. + */ + private void lookupDataNodes( + Task parentTask, + String clusterAlias, + QueryBuilder filter, + Set concreteIndices, + OriginalIndices originalIndices, + ActionListener listener + ) { + ActionListener searchShardsListener = listener.map(resp -> { + Map nodes = new HashMap<>(); + for (DiscoveryNode node : resp.getNodes()) { + nodes.put(node.getId(), node); + } + Map> nodeToShards = new HashMap<>(); + Map> nodeToAliasFilters = new HashMap<>(); + int totalShards = 0; + int skippedShards = 0; + for (SearchShardsGroup group : resp.getGroups()) { + var shardId = group.shardId(); + if (group.allocatedNodes().isEmpty()) { + throw new ShardNotFoundException(group.shardId(), "no shard copies found {}", group.shardId()); + } + if (concreteIndices.contains(shardId.getIndexName()) == false) { + continue; + } + totalShards++; + if (group.skipped()) { + skippedShards++; + continue; + } + String targetNode = group.allocatedNodes().get(0); + nodeToShards.computeIfAbsent(targetNode, k -> new ArrayList<>()).add(shardId); + AliasFilter aliasFilter = resp.getAliasFilters().get(shardId.getIndex().getUUID()); + if (aliasFilter != null) { + nodeToAliasFilters.computeIfAbsent(targetNode, k -> new HashMap<>()).put(shardId.getIndex(), aliasFilter); + } + } + List dataNodes = new ArrayList<>(nodeToShards.size()); + for (Map.Entry> e : nodeToShards.entrySet()) { + DiscoveryNode node = nodes.get(e.getKey()); + Map aliasFilters = nodeToAliasFilters.getOrDefault(e.getKey(), Map.of()); + dataNodes.add(new DataNode(transportService.getConnection(node), e.getValue(), aliasFilters)); + } + return new DataNodeResult(dataNodes, totalShards, skippedShards); + }); + SearchShardsRequest searchShardsRequest = new SearchShardsRequest( + originalIndices.indices(), + 
originalIndices.indicesOptions(), + filter, + null, + null, + false, + clusterAlias + ); + transportService.sendChildRequest( + transportService.getLocalNode(), + EsqlSearchShardsAction.TYPE.name(), + searchShardsRequest, + parentTask, + TransportRequestOptions.EMPTY, + new ActionListenerResponseHandler<>(searchShardsListener, SearchShardsResponse::new, esqlExecutor) + ); + } + + private class DataNodeRequestExecutor { + private final DataNodeRequest request; + private final CancellableTask parentTask; + private final ExchangeSinkHandler exchangeSink; + private final ComputeListener computeListener; + private final int maxConcurrentShards; + private final ExchangeSink blockingSink; // block until we have completed on all shards or the coordinator has enough data + + DataNodeRequestExecutor( + DataNodeRequest request, + CancellableTask parentTask, + ExchangeSinkHandler exchangeSink, + int maxConcurrentShards, + ComputeListener computeListener + ) { + this.request = request; + this.parentTask = parentTask; + this.exchangeSink = exchangeSink; + this.computeListener = computeListener; + this.maxConcurrentShards = maxConcurrentShards; + this.blockingSink = exchangeSink.createExchangeSink(); + } + + void start() { + parentTask.addListener( + () -> exchangeService.finishSinkHandler(request.sessionId(), new TaskCancelledException(parentTask.getReasonCancelled())) + ); + runBatch(0); + } + + private void runBatch(int startBatchIndex) { + final Configuration configuration = request.configuration(); + final String clusterAlias = request.clusterAlias(); + final var sessionId = request.sessionId(); + final int endBatchIndex = Math.min(startBatchIndex + maxConcurrentShards, request.shardIds().size()); + List shardIds = request.shardIds().subList(startBatchIndex, endBatchIndex); + ActionListener batchListener = new ActionListener<>() { + final ActionListener ref = computeListener.acquireCompute(); + + @Override + public void onResponse(ComputeResponse result) { + try { + 
onBatchCompleted(endBatchIndex); + } finally { + ref.onResponse(result); + } + } + + @Override + public void onFailure(Exception e) { + try { + exchangeService.finishSinkHandler(request.sessionId(), e); + } finally { + ref.onFailure(e); + } + } + }; + acquireSearchContexts(clusterAlias, shardIds, configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.SEARCH, ESQL_WORKER_THREAD_POOL_NAME); + var computeContext = new ComputeContext( + sessionId, + clusterAlias, + searchContexts, + configuration, + configuration.newFoldContext(), + null, + exchangeSink + ); + computeService.runCompute(parentTask, computeContext, request.plan(), batchListener); + }, batchListener::onFailure)); + } + + private void onBatchCompleted(int lastBatchIndex) { + if (lastBatchIndex < request.shardIds().size() && exchangeSink.isFinished() == false) { + runBatch(lastBatchIndex); + } else { + // don't return until all pages are fetched + var completionListener = computeListener.acquireAvoid(); + exchangeSink.addCompletionListener( + ActionListener.runAfter(completionListener, () -> exchangeService.finishSinkHandler(request.sessionId(), null)) + ); + blockingSink.finish(); + } + } + } + + private void runComputeOnDataNode( + CancellableTask task, + String externalId, + PhysicalPlan reducePlan, + DataNodeRequest request, + ComputeListener computeListener + ) { + var parentListener = computeListener.acquireAvoid(); + try { + // run compute with target shards + var internalSink = exchangeService.createSinkHandler(request.sessionId(), request.pragmas().exchangeBufferSize()); + DataNodeRequestExecutor dataNodeRequestExecutor = new DataNodeRequestExecutor( + request, + task, + internalSink, + request.configuration().pragmas().maxConcurrentShardsPerNode(), + computeListener + ); + dataNodeRequestExecutor.start(); + // run the node-level reduction + var externalSink = exchangeService.getSinkHandler(externalId); + 
task.addListener(() -> exchangeService.finishSinkHandler(externalId, new TaskCancelledException(task.getReasonCancelled()))); + var exchangeSource = new ExchangeSourceHandler(1, esqlExecutor, computeListener.acquireAvoid()); + exchangeSource.addRemoteSink(internalSink::fetchPageAsync, true, 1, ActionListener.noop()); + ActionListener reductionListener = computeListener.acquireCompute(); + computeService.runCompute( + task, + new ComputeContext( + request.sessionId(), + request.clusterAlias(), + List.of(), + request.configuration(), + new FoldContext(request.pragmas().foldLimit().getBytes()), + exchangeSource, + externalSink + ), + reducePlan, + ActionListener.wrap(resp -> { + // don't return until all pages are fetched + externalSink.addCompletionListener(ActionListener.running(() -> { + exchangeService.finishSinkHandler(externalId, null); + reductionListener.onResponse(resp); + })); + }, e -> { + exchangeService.finishSinkHandler(externalId, e); + reductionListener.onFailure(e); + }) + ); + parentListener.onResponse(null); + } catch (Exception e) { + exchangeService.finishSinkHandler(externalId, e); + exchangeService.finishSinkHandler(request.sessionId(), e); + parentListener.onFailure(e); + } + } + + @Override + public void messageReceived(DataNodeRequest request, TransportChannel channel, Task task) { + final ActionListener listener = new ChannelActionListener<>(channel); + final PhysicalPlan reductionPlan; + if (request.plan() instanceof ExchangeSinkExec plan) { + reductionPlan = ComputeService.reductionPlan(plan, request.runNodeLevelReduction()); + } else { + listener.onFailure(new IllegalStateException("expected exchange sink for a remote compute; got " + request.plan())); + return; + } + final String sessionId = request.sessionId(); + request = new DataNodeRequest( + sessionId + "[n]", // internal session + request.configuration(), + request.clusterAlias(), + request.shardIds(), + request.aliasFilters(), + request.plan(), + request.indices(), + 
request.indicesOptions(), + request.runNodeLevelReduction() + ); + try (var computeListener = ComputeListener.create(transportService, (CancellableTask) task, listener)) { + runComputeOnDataNode((CancellableTask) task, sessionId, reductionPlan, request, computeListener); + } + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index 84173eeecc060..43aa0247d4481 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -44,6 +44,7 @@ import org.elasticsearch.xpack.esql.action.EsqlQueryTask; import org.elasticsearch.xpack.esql.core.async.AsyncTaskManagementService; import org.elasticsearch.xpack.esql.core.expression.FoldContext; +import org.elasticsearch.xpack.esql.enrich.AbstractLookupService; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.enrich.LookupFromIndexService; @@ -107,8 +108,23 @@ public TransportEsqlQueryAction( exchangeService.registerTransportHandler(transportService); this.exchangeService = exchangeService; this.enrichPolicyResolver = new EnrichPolicyResolver(clusterService, transportService, planExecutor.indexResolver()); - this.enrichLookupService = new EnrichLookupService(clusterService, searchService, transportService, bigArrays, blockFactory); - this.lookupFromIndexService = new LookupFromIndexService(clusterService, searchService, transportService, bigArrays, blockFactory); + AbstractLookupService.CreateShardContext lookupCreateShardContext = AbstractLookupService.CreateShardContext.fromSearchService( + searchService + ); + this.enrichLookupService = new EnrichLookupService( + clusterService, + 
lookupCreateShardContext, + transportService, + bigArrays, + blockFactory + ); + this.lookupFromIndexService = new LookupFromIndexService( + clusterService, + lookupCreateShardContext, + transportService, + bigArrays, + blockFactory + ); this.computeService = new ComputeService( searchService, transportService, diff --git a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java similarity index 79% rename from x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java rename to x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java index 92a42d3053b68..e92f3c899f581 100644 --- a/x-pack/plugin/esql-core/src/main/java/org/elasticsearch/xpack/esql/core/querydsl/query/TranslationAwareExpressionQuery.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/querydsl/query/TranslationAwareExpressionQuery.java @@ -5,14 +5,16 @@ * 2.0. */ -package org.elasticsearch.xpack.esql.core.querydsl.query; +package org.elasticsearch.xpack.esql.querydsl.query; import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.xpack.esql.capabilities.TranslationAware; +import org.elasticsearch.xpack.esql.core.querydsl.query.Query; import org.elasticsearch.xpack.esql.core.tree.Source; /** * Expressions that store their own {@link QueryBuilder} and implement - * {@link org.elasticsearch.xpack.esql.core.expression.TranslationAware} can use {@link TranslationAwareExpressionQuery} + * {@link TranslationAware} can use {@link TranslationAwareExpressionQuery} * to wrap their {@link QueryBuilder}, instead of using the other existing {@link Query} implementations. 
*/ public class TranslationAwareExpressionQuery extends Query { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java index ab4a9b02e41fa..7db81069f9d3c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/QueryBuilderResolver.java @@ -20,7 +20,6 @@ import org.elasticsearch.xpack.esql.expression.function.fulltext.FullTextFunction; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.LogicalPlan; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; import java.io.IOException; import java.util.HashMap; @@ -29,6 +28,8 @@ import java.util.Set; import java.util.function.BiConsumer; +import static org.elasticsearch.xpack.esql.planner.TranslatorHandler.TRANSLATOR_HANDLER; + /** * Some {@link FullTextFunction} implementations such as {@link org.elasticsearch.xpack.esql.expression.function.fulltext.Match} * will be translated to a {@link QueryBuilder} that require a rewrite phase on the coordinator. 
@@ -98,9 +99,7 @@ private Set fullTextFunctions(LogicalPlan plan) { public Set indexNames(LogicalPlan plan) { Holder> indexNames = new Holder<>(); - - plan.forEachDown(EsRelation.class, esRelation -> { indexNames.set(esRelation.index().concreteIndices()); }); - + plan.forEachDown(EsRelation.class, esRelation -> indexNames.set(esRelation.concreteIndices())); return indexNames.get(); } @@ -141,7 +140,7 @@ private class FullTextFunctionsRewritable implements Rewriteable(); for (FullTextFunction func : functions) { - queryBuilderMap.put(func, func.asQuery(PlannerUtils.TRANSLATOR_HANDLER).asBuilder()); + queryBuilderMap.put(func, TRANSLATOR_HANDLER.asQuery(func).asBuilder()); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 1d49409dc964d..f4c68f141460b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -33,6 +33,7 @@ import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverSleeps; import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; @@ -50,7 +51,6 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.versionfield.Version; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 84af46a8cbbf0..1c3b3a5c463e7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -21,9 +21,11 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.EntryExpression; import org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; @@ -34,6 +36,7 @@ import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; import org.elasticsearch.xpack.esql.expression.function.aggregate.Min; import org.elasticsearch.xpack.esql.expression.function.fulltext.Match; +import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; import org.elasticsearch.xpack.esql.parser.ParsingException; @@ -112,7 +115,7 @@ public void testIndexResolution() { var plan = analyzer.analyze(UNRESOLVED_RELATION); var limit = as(plan, Limit.class); - assertEquals(new EsRelation(EMPTY, idx, NO_FIELDS, IndexMode.STANDARD), limit.child()); + assertEquals(new EsRelation(EMPTY, idx.name(), IndexMode.STANDARD, idx.indexNameWithModes(), NO_FIELDS), 
limit.child()); } public void testFailOnUnresolvedIndex() { @@ -130,7 +133,7 @@ public void testIndexWithClusterResolution() { var plan = analyzer.analyze(UNRESOLVED_RELATION); var limit = as(plan, Limit.class); - assertEquals(new EsRelation(EMPTY, idx, NO_FIELDS, IndexMode.STANDARD), limit.child()); + assertEquals(new EsRelation(EMPTY, idx.name(), IndexMode.STANDARD, idx.indexNameWithModes(), NO_FIELDS), limit.child()); } public void testAttributeResolution() { @@ -2068,7 +2071,7 @@ public void testLookup() { assertThat(project.projections().stream().map(Object::toString).toList(), hasItem(matchesRegex("languages\\{f}#\\d+ AS int#\\d+"))); var esRelation = as(project.child(), EsRelation.class); - assertThat(esRelation.index().name(), equalTo("test")); + assertThat(esRelation.indexPattern(), equalTo("test")); // Lookup's output looks sensible too assertMap( @@ -2580,8 +2583,53 @@ public void testFromEnrichAndMatchColonUsage() { assertEquals(enrich.policy().getMatchField(), "language_code"); var eval = as(enrich.child(), Eval.class); var esRelation = as(eval.child(), EsRelation.class); - assertEquals(esRelation.index().name(), "test"); + assertEquals(esRelation.indexPattern(), "test"); + } + public void testMapExpressionAsFunctionArgument() { + assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); + LogicalPlan plan = analyze(""" + from test + | EVAL l = log_with_base_in_map(languages, {"base":2.0}) + | KEEP l + """, "mapping-default.json"); + Limit limit = as(plan, Limit.class); + EsqlProject proj = as(limit.child(), EsqlProject.class); + List fields = proj.projections(); + assertEquals(1, fields.size()); + ReferenceAttribute ra = as(fields.get(0), ReferenceAttribute.class); + assertEquals("l", ra.name()); + assertEquals(DataType.DOUBLE, ra.dataType()); + Eval eval = as(proj.child(), Eval.class); + assertEquals(1, eval.fields().size()); + Alias a = as(eval.fields().get(0), Alias.class); + 
LogWithBaseInMap l = as(a.child(), LogWithBaseInMap.class); + MapExpression me = as(l.base(), MapExpression.class); + assertEquals(1, me.entryExpressions().size()); + EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); + assertEquals(new Literal(EMPTY, "base", DataType.KEYWORD), ee.key()); + assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); + assertEquals(DataType.DOUBLE, ee.dataType()); + EsRelation esRelation = as(eval.child(), EsRelation.class); + assertEquals(esRelation.indexPattern(), "test"); + } + + private void verifyMapExpression(MapExpression me) { + Literal option1 = new Literal(EMPTY, "option1", DataType.KEYWORD); + Literal value1 = new Literal(EMPTY, "value1", DataType.KEYWORD); + Literal option2 = new Literal(EMPTY, "option2", DataType.KEYWORD); + Literal value2 = new Literal(EMPTY, List.of(1, 2, 3), DataType.INTEGER); + + assertEquals(2, me.entryExpressions().size()); + EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); + assertEquals(option1, ee.key()); + assertEquals(value1, ee.value()); + assertEquals(value1.dataType(), ee.dataType()); + + ee = as(me.entryExpressions().get(1), EntryExpression.class); + assertEquals(option2, ee.key()); + assertEquals(value2, ee.value()); + assertEquals(value2.dataType(), ee.dataType()); } private void verifyUnsupported(String query, String errorMessage) { @@ -2644,7 +2692,6 @@ private void assertEmptyEsRelation(LogicalPlan plan) { assertThat(plan, instanceOf(EsRelation.class)); EsRelation esRelation = (EsRelation) plan; assertThat(esRelation.output(), equalTo(NO_FIELDS)); - assertTrue(esRelation.index().mapping().isEmpty()); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java index f932992e81557..e3214411698b0 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/VerifierTests.java @@ -284,27 +284,26 @@ public void testRoundFunctionInvalidInputs() { error("row a = 1, b = \"c\" | eval x = round(b)") ); assertEquals( - "1:31: second argument of [round(a, b)] must be [integer], found value [b] type [keyword]", + "1:31: second argument of [round(a, b)] must be [whole number except unsigned_long or counter types], " + + "found value [b] type [keyword]", error("row a = 1, b = \"c\" | eval x = round(a, b)") ); assertEquals( - "1:31: second argument of [round(a, 3.5)] must be [integer], found value [3.5] type [double]", + "1:31: second argument of [round(a, 3.5)] must be [whole number except unsigned_long or counter types], " + + "found value [3.5] type [double]", error("row a = 1, b = \"c\" | eval x = round(a, 3.5)") ); } public void testImplicitCastingErrorMessages() { - assertEquals( - "1:23: Cannot convert string [c] to [INTEGER], error [Cannot parse number [c]]", - error("row a = round(123.45, \"c\")") - ); + assertEquals("1:23: Cannot convert string [c] to [LONG], error [Cannot parse number [c]]", error("row a = round(123.45, \"c\")")); assertEquals( "1:27: Cannot convert string [c] to [DOUBLE], error [Cannot parse number [c]]", error("row a = 1 | eval x = acos(\"c\")") ); assertEquals( "1:33: Cannot convert string [c] to [DOUBLE], error [Cannot parse number [c]]\n" - + "line 1:38: Cannot convert string [a] to [INTEGER], error [Cannot parse number [a]]", + + "line 1:38: Cannot convert string [a] to [LONG], error [Cannot parse number [a]]", error("row a = 1 | eval x = round(acos(\"c\"),\"a\")") ); assertEquals( @@ -1994,6 +1993,27 @@ public void testLookupJoinDataTypeMismatch() { ); } + public void testInvalidMapOption() { + assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); + // 
invalid key + assertEquals( + "1:22: Invalid option key in [log_with_base_in_map(languages, {\"base\":2.0, \"invalidOption\":true})], " + + "expected base but got [\"invalidOption\"]", + error("FROM test | EVAL l = log_with_base_in_map(languages, {\"base\":2.0, \"invalidOption\":true})") + ); + // key is case-sensitive + assertEquals( + "1:22: Invalid option key in [log_with_base_in_map(languages, {\"Base\":2.0})], " + "expected base but got [\"Base\"]", + error("FROM test | EVAL l = log_with_base_in_map(languages, {\"Base\":2.0})") + ); + // invalid value + assertEquals( + "1:22: Invalid option value in [log_with_base_in_map(languages, {\"base\":\"invalid\"})], " + + "expected a numeric number but got [invalid]", + error("FROM test | EVAL l = log_with_base_in_map(languages, {\"base\":\"invalid\"})") + ); + } + private void query(String query) { query(query, defaultAnalyzer); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java index 098ea9eaa0c2d..b6fde4b609203 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/enrich/LookupFromIndexServiceResponseTests.java @@ -16,13 +16,14 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.RandomBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import 
org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.junit.After; import java.io.IOException; @@ -37,23 +38,34 @@ public class LookupFromIndexServiceResponseTests extends AbstractWireSerializing private final List breakers = new ArrayList<>(); LookupFromIndexService.LookupResponse createTestInstance(BlockFactory blockFactory) { - return new LookupFromIndexService.LookupResponse(randomList(0, 10, () -> testPage(blockFactory)), blockFactory); + return new LookupFromIndexService.LookupResponse(randomList(0, 10, () -> randomPage(blockFactory)), blockFactory); } /** - * Build a {@link Page} to test serialization. If we had nice random - * {@linkplain Page} generation we'd use that happily, but it's off - * in the tests for compute, and we're in ESQL. And we don't - * really need a fully random one to verify serialization - * here. + * Build a random {@link Page} to test serialization. 
*/ - Page testPage(BlockFactory blockFactory) { - try (IntVector.Builder builder = blockFactory.newIntVectorFixedBuilder(3)) { - builder.appendInt(1); - builder.appendInt(2); - builder.appendInt(3); - return new Page(builder.build().asBlock()); + Page randomPage(BlockFactory blockFactory) { + Block[] blocks = new Block[between(1, 20)]; + int positionCount = between(1, 100); + try { + for (int i = 0; i < blocks.length; i++) { + blocks[i] = RandomBlock.randomBlock( + blockFactory, + RandomBlock.randomElementType(), + positionCount, + randomBoolean(), + 1, + 1, + 0, + 0 + ).block(); + } + } finally { + if (blocks[blocks.length - 1] == null) { + Releasables.close(blocks); + } } + return new Page(blocks); } @Override @@ -72,13 +84,13 @@ protected LookupFromIndexService.LookupResponse mutateInstance(LookupFromIndexSe assertThat(instance.blockFactory, sameInstance(TestBlockFactory.getNonBreakingInstance())); List pages = new ArrayList<>(instance.pages().size()); pages.addAll(instance.pages()); - pages.add(testPage(TestBlockFactory.getNonBreakingInstance())); + pages.add(randomPage(TestBlockFactory.getNonBreakingInstance())); return new LookupFromIndexService.LookupResponse(pages, instance.blockFactory); } @Override protected NamedWriteableRegistry getNamedWriteableRegistry() { - return new NamedWriteableRegistry(List.of(IntBlock.ENTRY)); + return new NamedWriteableRegistry(BlockWritables.getNamedWriteables()); } public void testWithBreaker() throws IOException { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java index 4460798347911..da076a5db031b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/CanonicalTests.java @@ -13,12 +13,12 @@ import 
org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 6011a972d69ec..67dec69b51393 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -23,6 +23,7 @@ import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.PathUtils; import 
org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.logging.LogManager; @@ -30,15 +31,12 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -51,6 +49,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -101,6 +101,9 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.SerializationTestUtils.assertSerialization; +import static 
org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.mapParam; +import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.param; +import static org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry.paramWithoutAnnotation; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; @@ -384,7 +387,9 @@ protected static TestCaseSupplier typeErrorSupplier( /** * Build a test case that asserts that the combination of parameter types is an error. + * @deprecated use an extension of {@link ErrorsForCasesWithoutExamplesTestCase} */ + @Deprecated protected static TestCaseSupplier typeErrorSupplier( boolean includeOrdinal, List> validPerPosition, @@ -643,11 +648,17 @@ protected static void buildLayout(Layout.Builder builder, Expression e) { protected Object toJavaObjectUnsignedLongAware(Block block, int position) { Object result; result = toJavaObject(block, position); - if (result != null && testCase.expectedType() == DataType.UNSIGNED_LONG) { - assertThat(result, instanceOf(Long.class)); - result = NumericUtils.unsignedLongAsBigInteger((Long) result); + if (result == null || testCase.expectedType() != DataType.UNSIGNED_LONG) { + return result; } - return result; + if (result instanceof List l) { + return l.stream().map(v -> { + assertThat(v, instanceOf(Long.class)); + return NumericUtils.unsignedLongAsBigInteger((Long) v); + }).toList(); + } + assertThat(result, instanceOf(Long.class)); + return NumericUtils.unsignedLongAsBigInteger((Long) result); } /** @@ -755,6 +766,9 @@ public static void testFunctionInfo() { continue; } log.info("{}: tested {} vs annotated {}", arg.name(), signatureTypes, annotationTypes); + if (annotationTypes.size() == 1 && annotationTypes.iterator().next().equalsIgnoreCase("map")) { // map is not a DataType + continue; + } assertEquals( "Mismatch between actual and declared param type for [" + arg.name() @@ -908,7 
+922,7 @@ public static void renderDocs() throws IOException { description.isAggregation() ); } - renderTypes(description.argNames()); + renderTypes(description.args()); renderParametersList(description.argNames(), description.argDescriptions()); FunctionInfo info = EsqlFunctionRegistry.functionInfo(definition); renderDescription(description.description(), info.detailedDescription(), info.note()); @@ -930,8 +944,9 @@ public static void renderDocs() throws IOException { + "may be changed or removed in a future release. Elastic will work to fix any issues, but features in technical preview " + "are not subject to the support SLA of official GA features.\"]\n"; - private static void renderTypes(List argNames) throws IOException { + private static void renderTypes(List args) throws IOException { StringBuilder header = new StringBuilder(); + List argNames = args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList(); for (String arg : argNames) { header.append(arg).append(" | "); } @@ -946,8 +961,13 @@ private static void renderTypes(List argNames) throws IOException { continue; } StringBuilder b = new StringBuilder(); - for (DataType arg : sig.getKey()) { - b.append(arg.esNameIfPossible()).append(" | "); + for (int i = 0; i < sig.getKey().size(); i++) { + DataType argType = sig.getKey().get(i); + if (args.get(i).mapArg()) { + b.append("map | "); + } else { + b.append(argType.esNameIfPossible()).append(" | "); + } } b.append("| ".repeat(argNames.size() - sig.getKey().size())); b.append(sig.getValue().esNameIfPossible()); @@ -1097,16 +1117,17 @@ private static void renderDocsForOperators(String name) throws IOException { List args = new ArrayList<>(params.length); for (int i = 1; i < params.length; i++) { // skipping 1st argument, the source if (Configuration.class.isAssignableFrom(params[i].getType()) == false) { - Param paramInfo = params[i].getAnnotation(Param.class); - String paramName = paramInfo == null ? 
params[i].getName() : paramInfo.name(); - String[] type = paramInfo == null ? new String[] { "?" } : paramInfo.type(); - String desc = paramInfo == null ? "" : paramInfo.description().replace('\n', ' '); - boolean optional = paramInfo == null ? false : paramInfo.optional(); - args.add(new EsqlFunctionRegistry.ArgSignature(paramName, type, desc, optional)); + MapParam mapParamInfo = params[i].getAnnotation(MapParam.class); + if (mapParamInfo != null) { + args.add(mapParam(mapParamInfo)); + } else { + Param paramInfo = params[i].getAnnotation(Param.class); + args.add(paramInfo != null ? param(paramInfo) : paramWithoutAnnotation(params[i].getName())); + } } } renderKibanaFunctionDefinition(name, functionInfo, args, likeOrInOperator(name)); - renderTypes(args.stream().map(EsqlFunctionRegistry.ArgSignature::name).toList()); + renderTypes(args); } private static void renderKibanaInlineDocs(String name, FunctionInfo info) throws IOException { @@ -1187,7 +1208,19 @@ private static void renderKibanaFunctionDefinition( EsqlFunctionRegistry.ArgSignature arg = args.get(i); builder.startObject(); builder.field("name", arg.name()); - builder.field("type", sig.getKey().get(i).esNameIfPossible()); + if (arg.mapArg()) { + builder.field("type", "map"); + builder.field( + "mapParams", + arg.mapParams() + .values() + .stream() + .map(mapArgSignature -> "{" + mapArgSignature + "}") + .collect(Collectors.joining(", ")) + ); + } else { + builder.field("type", sig.getKey().get(i).esNameIfPossible()); + } builder.field("optional", arg.optional()); builder.field("description", arg.description()); builder.endObject(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java index 944515e54af75..429e6685a201c 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractScalarFunctionTestCase.java @@ -95,6 +95,24 @@ protected static Iterable parameterSuppliersFromTypedDataWithDefaultCh return parameterSuppliersFromTypedData(anyNullIsNull(entirelyNullPreservesType, randomizeBytesRefsOffset(suppliers))); } + /** + * Converts a list of test cases into a list of parameter suppliers. + * Also, adds a default set of extra test cases. + *

    + * Use if possible, as this method may get updated with new checks in the future. + *

    + * + * @param nullsExpectedType See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} + * @param evaluatorToString See {@link #anyNullIsNull(List, ExpectedType, ExpectedEvaluatorToString)} + */ + protected static Iterable parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( + ExpectedType nullsExpectedType, + ExpectedEvaluatorToString evaluatorToString, + List suppliers + ) { + return parameterSuppliersFromTypedData(anyNullIsNull(randomizeBytesRefsOffset(suppliers), nullsExpectedType, evaluatorToString)); + } + /** * Converts a list of test cases into a list of parameter suppliers. * Also, adds a default set of extra test cases. @@ -372,7 +390,11 @@ public void testFold() { Object result = nullOptimized.fold(FoldContext.small()); // Decode unsigned longs into BigIntegers if (testCase.expectedType() == DataType.UNSIGNED_LONG && result != null) { - result = NumericUtils.unsignedLongAsBigInteger((Long) result); + if (result instanceof List l) { + result = l.stream().map(v -> NumericUtils.unsignedLongAsBigInteger((Long) v)).toList(); + } else { + result = NumericUtils.unsignedLongAsBigInteger((Long) result); + } } assertThat(result, testCase.getMatcher()); if (testCase.getExpectedBuildEvaluatorWarnings() != null) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ErrorsForCasesWithoutExamplesTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ErrorsForCasesWithoutExamplesTestCase.java index 7269abad07297..3e31031d46a30 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ErrorsForCasesWithoutExamplesTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/ErrorsForCasesWithoutExamplesTestCase.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCountErrorTests; import org.hamcrest.Matcher; import java.util.ArrayList; @@ -24,6 +25,10 @@ import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.hamcrest.Matchers.greaterThan; +/** + * Extend me to test that all cases not mentioned in a subclass of + * {@link AbstractFunctionTestCase} produce type errors. + */ public abstract class ErrorsForCasesWithoutExamplesTestCase extends ESTestCase { protected abstract List cases(); @@ -37,6 +42,15 @@ public abstract class ErrorsForCasesWithoutExamplesTestCase extends ESTestCase { */ protected abstract Expression build(Source source, List args); + /** + * A matcher for the invalid type error message. + *

    + * If you are implementing this for a function that should process all types + * then have a look how {@link MvCountErrorTests} does it. It's nice to throw + * an error explaining this. But while someone is implementing a new type + * they will want to turn that off temporarily. And we say that in the note too. + *

    + */ protected abstract Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature); protected final List paramsToSuppliers(Iterable cases) { @@ -56,8 +70,8 @@ public final void test() { List cases = cases(); Set> valid = cases.stream().map(TestCaseSupplier::types).collect(Collectors.toSet()); List> validPerPosition = AbstractFunctionTestCase.validPerPosition(valid); - Iterable> missingSignatures = missingSignatures(cases, valid)::iterator; - for (List signature : missingSignatures) { + Iterable> testCandidates = testCandidates(cases, valid)::iterator; + for (List signature : testCandidates) { logger.debug("checking {}", signature); List args = new ArrayList<>(signature.size()); for (DataType type : signature) { @@ -80,7 +94,10 @@ protected void assertNumberOfCheckedSignatures(int checked) { assertThat("didn't check any signatures", checked, greaterThan(0)); } - private Stream> missingSignatures(List cases, Set> valid) { + /** + * Build a {@link Stream} of test signatures that we should check are invalid. + */ + protected Stream> testCandidates(List cases, Set> valid) { return cases.stream() .map(s -> s.types().size()) .collect(Collectors.toSet()) @@ -125,7 +142,7 @@ protected static String typeErrorMessage( } if (badArgPosition == -1) { throw new IllegalStateException( - "Can't generate error message for these types, you probably need a custom error message function" + "Can't generate error message for these types, you probably need a custom error message function signature =" + signature ); } String ordinal = includeOrdinal ? 
TypeResolutions.ParamOrdinal.fromIndex(badArgPosition).name().toLowerCase(Locale.ROOT) + " " : ""; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java index 9bf063518d4ba..4f89ba6bd0504 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/TestCaseSupplier.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; @@ -1416,6 +1417,10 @@ public static final class TestCase { */ private final String[] expectedBuildEvaluatorWarnings; + /** + * @deprecated use subclasses of {@link ErrorsForCasesWithoutExamplesTestCase} + */ + @Deprecated private final String expectedTypeError; private final boolean canBuildEvaluator; @@ -1436,6 +1441,11 @@ public TestCase(List data, Matcher evaluatorToString, DataTyp this(data, evaluatorToString, expectedType, matcher, null, null, null, null, null, null); } + /** + * Build a test case for type errors. 
+ * @deprecated use a subclass of {@link ErrorsForCasesWithoutExamplesTestCase} instead + */ + @Deprecated public static TestCase typeError(List data, String expectedTypeError) { return new TestCase(data, null, null, null, null, null, expectedTypeError, null, null, null); } @@ -1513,7 +1523,7 @@ public List getDataAsDeepCopiedFields() { } public List getDataAsLiterals() { - return data.stream().map(TypedData::asLiteral).collect(Collectors.toList()); + return data.stream().map(e -> e.mapExpression ? e.asMapExpression() : e.asLiteral()).collect(Collectors.toList()); } public List getDataValues() { @@ -1556,6 +1566,10 @@ public String foldingExceptionMessage() { return foldingExceptionMessage; } + /** + * @deprecated use subclasses of {@link ErrorsForCasesWithoutExamplesTestCase} + */ + @Deprecated public String getExpectedTypeError() { return expectedTypeError; } @@ -1730,6 +1744,7 @@ public static class TypedData { private final String name; private final boolean forceLiteral; private final boolean multiRow; + private final boolean mapExpression; /** * @param data value to test against @@ -1751,6 +1766,7 @@ private TypedData(Object data, DataType type, String name, boolean forceLiteral, this.name = name; this.forceLiteral = forceLiteral; this.multiRow = multiRow; + this.mapExpression = data instanceof MapExpression; } /** @@ -1826,7 +1842,7 @@ public String toString() { */ public Expression asField() { if (forceLiteral) { - return asLiteral(); + return mapExpression ? asMapExpression() : asLiteral(); } return AbstractFunctionTestCase.field(name, type); } @@ -1836,7 +1852,7 @@ public Expression asField() { */ public Expression asDeepCopyOfField() { if (forceLiteral) { - return asLiteral(); + return mapExpression ? asMapExpression() : asLiteral(); } return AbstractFunctionTestCase.deepCopyOfField(name, type); } @@ -1872,6 +1888,13 @@ public List multiRowData() { return (List) data; } + /** + * If the data is a MapExpression, return it as it is. 
+ */ + public MapExpression asMapExpression() { + return mapExpression ? (MapExpression) data : null; + } + /** * @return the data value being supplied, casting to java objects when appropriate */ diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlErrorTests.java new file mode 100644 index 0000000000000..891c419841e70 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/KqlErrorTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class KqlErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(KqlTests.parameters()); + } + + @Override + protected Stream> testCandidates(List cases, Set> valid) { + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return super.testCandidates(cases, valid).filter(sig -> false == sig.contains(DataType.NULL)); + } + + @Override + protected Expression 
build(Source source, List args) { + return new Kql(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java new file mode 100644 index 0000000000000..1f4e8e40a8259 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchErrorTests.java @@ -0,0 +1,75 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Locale; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MatchErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MatchTests.parameters()); + } + + @Override + 
protected Expression build(Source source, List args) { + return new Match(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + errorMessageStringForMatch(validPerPosition, signature, (l, p) -> p == 0 ? FIELD_TYPE_ERROR_STRING : QUERY_TYPE_ERROR_STRING) + ); + } + + private static String errorMessageStringForMatch( + List> validPerPosition, + List signature, + AbstractFunctionTestCase.PositionalErrorMessageSupplier positionalErrorMessageSupplier + ) { + for (int i = 0; i < signature.size(); i++) { + // Need to check for nulls and bad parameters in order + if (signature.get(i) == DataType.NULL) { + return TypeResolutions.ParamOrdinal.fromIndex(i).name().toLowerCase(Locale.ROOT) + + " argument of [" + + sourceForSignature(signature) + + "] cannot be null, received []"; + } + if (validPerPosition.get(i).contains(signature.get(i)) == false) { + break; + } + } + + try { + return typeErrorMessage(true, validPerPosition, signature, positionalErrorMessageSupplier); + } catch (IllegalStateException e) { + // This means all the positional args were okay, so the expected error is for nulls or from the combination + return EsqlBinaryComparison.formatIncompatibleTypesMessage(signature.get(0), signature.get(1), sourceForSignature(signature)); + } + } + + private static final String FIELD_TYPE_ERROR_STRING = + "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; + + private static final String QUERY_TYPE_ERROR_STRING = + "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java index f29add60721da..cb0c9b263b547 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/MatchTests.java @@ -11,20 +11,16 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.TypeResolutions; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.FunctionName; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import java.math.BigInteger; import java.util.ArrayList; import java.util.List; -import java.util.Locale; -import java.util.Set; import java.util.function.Supplier; import static org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier.stringCases; @@ -33,12 +29,6 @@ @FunctionName("match") public class MatchTests extends AbstractFunctionTestCase { - private static final String FIELD_TYPE_ERROR_STRING = - "keyword, text, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; - - private static final String QUERY_TYPE_ERROR_STRING = - "keyword, boolean, date, date_nanos, double, integer, ip, long, unsigned_long, version"; - public MatchTests(@Name("TestCase") Supplier testCaseSupplier) { this.testCase = testCaseSupplier.get(); } @@ -53,37 +43,7 @@ public static Iterable parameters() { addQueryAsStringTestCases(suppliers); addStringTestCases(suppliers); - return parameterSuppliersFromTypedData( - errorsForCasesWithoutExamples( - suppliers, - (o, v, t) -> errorMessageStringForMatch(o, v, t, (l, p) -> p == 0 ? 
FIELD_TYPE_ERROR_STRING : QUERY_TYPE_ERROR_STRING) - ) - ); - } - - private static String errorMessageStringForMatch( - boolean includeOrdinal, - List> validPerPosition, - List types, - PositionalErrorMessageSupplier positionalErrorMessageSupplier - ) { - for (int i = 0; i < types.size(); i++) { - // Need to check for nulls and bad parameters in order - if (types.get(i) == DataType.NULL) { - return TypeResolutions.ParamOrdinal.fromIndex(i).name().toLowerCase(Locale.ROOT) - + " argument of [] cannot be null, received [null]"; - } - if (validPerPosition.get(i).contains(types.get(i)) == false) { - break; - } - } - - try { - return typeErrorMessage(includeOrdinal, validPerPosition, types, positionalErrorMessageSupplier); - } catch (IllegalStateException e) { - // This means all the positional args were okay, so the expected error is for nulls or from the combination - return EsqlBinaryComparison.formatIncompatibleTypesMessage(types.get(0), types.get(1), ""); - } + return parameterSuppliersFromTypedData(suppliers); } private static void addNonNumericCases(List suppliers) { @@ -410,10 +370,6 @@ private static void addStringTestCases(List suppliers) { public final void testLiteralExpressions() { Expression expression = buildLiteralExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } assertFalse("expected resolved", expression.typeResolved().unresolved()); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java index 383cb8671053d..d528ee0a92de2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/NoneFieldFullTextFunctionTestCase.java @@ -28,10 +28,6 @@ public NoneFieldFullTextFunctionTestCase(Supplier tes public final void testFold() { Expression expression = buildLiteralExpression(testCase); - if (testCase.getExpectedTypeError() != null) { - assertTypeResolutionFailure(expression); - return; - } assertFalse("expected resolved", expression.typeResolved().unresolved()); } @@ -46,9 +42,7 @@ protected static Iterable generateParameters() { ) ); } - List errorsSuppliers = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); - // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests - return parameterSuppliersFromTypedData(errorsSuppliers.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList()); + return parameterSuppliersFromTypedData(suppliers); } private static TestCaseSupplier.TestCase testCase(DataType strType, String str, Matcher matcher) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringErrorTests.java new file mode 100644 index 0000000000000..b55543a0433c3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/QueryStringErrorTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class QueryStringErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(QueryStringTests.parameters()); + } + + @Override + protected Stream> testCandidates(List cases, Set> valid) { + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return super.testCandidates(cases, valid).filter(sig -> false == sig.contains(DataType.NULL)); + } + + @Override + protected Expression build(Source source, List args) { + return new QueryString(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermErrorTests.java new file mode 100644 index 0000000000000..a00858e8e1e43 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermErrorTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.fulltext; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class TermErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(TermTests.parameters()); + } + + @Override + protected Stream> testCandidates(List cases, Set> valid) { + // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests + return super.testCandidates(cases, valid).filter(sig -> false == sig.contains(DataType.NULL)); + } + + @Override + protected Expression build(Source source, List args) { + return new Term(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java index c1c0dc26880ab..d1df0ed09b28e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/fulltext/TermTests.java @@ -45,12 +45,7 @@ public static Iterable parameters() { } } - List suppliersWithErrors = errorsForCasesWithoutExamples(suppliers, (v, p) -> "string"); - - // Don't test null, as it is not allowed but the expected message is not a type error - so we check it separately in VerifierTests - return parameterSuppliersFromTypedData( - suppliersWithErrors.stream().filter(s -> s.types().contains(DataType.NULL) == false).toList() - ); + return parameterSuppliersFromTypedData(suppliers); } protected static List> supportedParams() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java index 40788eb7a2b2d..fdfdebb159531 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/AndSerializationTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java index aa8bad907eeb3..966bb664f2a09 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/NotSerializationTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java index 5e68a4af80623..9ceddc9910485 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/OrSerializationTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 23a0f2307171c..b196bd49f6bb2 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -770,10 +770,6 @@ public String toString() { } public void testFancyFolding() { - if (testCase.getExpectedTypeError() != null) { - // Nothing to do - return; - } Expression e = buildFieldExpression(testCase); if (extra().foldable == false) { assertThat(e.foldable(), equalTo(false)); @@ -794,7 +790,7 @@ public void testFancyFolding() { } public void testPartialFold() { - if (testCase.getExpectedTypeError() != null || extra().foldable()) { + if (extra().foldable()) { // Nothing to do return; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java new file mode 100644 index 0000000000000..e194443a8bc2c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffFunctionTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.date; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.containsString; + +/** + * Tests for {@link DateDiff} that should not run through the normal testing framework + */ +public class DateDiffFunctionTests extends ESTestCase { + + public void testDateDiffFunctionErrorUnitNotValid() { + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "Received value [sseconds] is not valid date part to add; " + + "did you mean [seconds, second, nanoseconds, milliseconds, microseconds, nanosecond]?" + ) + ); + + e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); + assertThat( + e.getMessage(), + containsString( + "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, " + + "NANOSECOND] or their aliases is required; received [not-valid-unit]" + ) + ); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java index b4a37b0297571..e2e2f0572c7aa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/date/DateDiffTests.java @@ -11,19 +11,17 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.xpack.esql.core.InvalidArgumentException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; 
import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; -import java.time.ZonedDateTime; +import java.time.Instant; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; -import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; public class DateDiffTests extends AbstractScalarFunctionTestCase { @@ -33,149 +31,144 @@ public DateDiffTests(@Name("TestCase") Supplier testC @ParametersFactory public static Iterable parameters() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:30Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-05T10:45:00Z"); List suppliers = new ArrayList<>(); - suppliers.add( + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:30Z"), Instant.parse("2023-12-05T10:45:00Z"), "seconds", 88170)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-12T00:01:01Z"), Instant.parse("2024-12-12T00:01:01Z"), "year", 1)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-12T00:01:01.001Z"), Instant.parse("2024-12-12T00:01:01Z"), "year", 0)); + + suppliers.addAll( + makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "nanoseconds", 1000000000) + ); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "ns", 1000000000)); + suppliers.addAll( + makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "microseconds", 1000000) + ); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "mcs", 1000000)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "milliseconds", 1000)); + 
suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "ms", 1000)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "seconds", 1)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "ss", 1)); + suppliers.addAll(makeSuppliers(Instant.parse("2023-12-04T10:15:00Z"), Instant.parse("2023-12-04T10:15:01Z"), "s", 1)); + + Instant zdtStart = Instant.parse("2023-12-04T10:15:00Z"); + Instant zdtEnd = Instant.parse("2024-12-04T10:15:01Z"); + + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "minutes", 527040)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "mi", 527040)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "n", 527040)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "hours", 8784)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "hh", 8784)); + + // 2024 is a leap year, so the dates are 366 days apart + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "weekdays", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "dw", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "days", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "dd", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "d", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "dy", 366)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "y", 366)); + + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "weeks", 52)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "wk", 52)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "ww", 52)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "months", 12)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "mm", 12)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "m", 12)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "quarters", 4)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "qq", 4)); + 
suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "q", 4)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "years", 1)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "yyyy", 1)); + suppliers.addAll(makeSuppliers(zdtStart, zdtEnd, "yy", 1)); + + // Error cases + Instant zdtStart2 = Instant.parse("2023-12-04T10:15:00Z"); + Instant zdtEnd2 = Instant.parse("2023-12-04T10:20:00Z"); + suppliers.addAll( + makeSuppliers( + zdtStart2, + zdtEnd2, + "nanoseconds", + "Line -1:-1: org.elasticsearch.xpack.esql.core.InvalidArgumentException: [300000000000] out of [integer] range" + ) + ); + + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); + } + + private static List makeSuppliers(Instant startTimestamp, Instant endTimestamp, String unit, int expected) { + // Units as Keyword case + return List.of( new TestCaseSupplier( - "Date Diff In Seconds - OK", + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataType.KEYWORD, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, - equalTo(88170) + equalTo(expected) ) - ) - ); - suppliers.add( + ), + // Units as text case new TestCaseSupplier( - "Date Diff In Seconds with text- OK", + "DateDiff(" + unit + ", " + 
startTimestamp + ", " + endTimestamp + ") == " + expected, List.of(DataType.TEXT, DataType.DATETIME, DataType.DATETIME), () -> new TestCaseSupplier.TestCase( List.of( - new TestCaseSupplier.TypedData(new BytesRef("seconds"), DataType.TEXT, "unit"), - new TestCaseSupplier.TypedData(zdtStart.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") ), "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", DataType.INTEGER, - equalTo(88170) + equalTo(expected) ) ) ); - suppliers.add(new TestCaseSupplier("Date Diff In Year - 1", List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> { - ZonedDateTime zdtStart2 = ZonedDateTime.parse("2023-12-12T00:01:01Z"); - ZonedDateTime zdtEnd2 = ZonedDateTime.parse("2024-12-12T00:01:01Z"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("year"), DataType.KEYWORD, "unit"), - new TestCaseSupplier.TypedData(zdtStart2.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd2.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") - ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", - DataType.INTEGER, - equalTo(1) - ); - })); - suppliers.add(new TestCaseSupplier("Date Diff In Year - 0", List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), () -> { - ZonedDateTime zdtStart2 = ZonedDateTime.parse("2023-12-12T00:01:01.001Z"); - ZonedDateTime zdtEnd2 = 
ZonedDateTime.parse("2024-12-12T00:01:01Z"); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(new BytesRef("year"), DataType.KEYWORD, "unit"), - new TestCaseSupplier.TypedData(zdtStart2.toInstant().toEpochMilli(), DataType.DATETIME, "startTimestamp"), - new TestCaseSupplier.TypedData(zdtEnd2.toInstant().toEpochMilli(), DataType.DATETIME, "endTimestamp") - ), - "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + "endTimestamp=Attribute[channel=2]]", - DataType.INTEGER, - equalTo(0) - ); - })); - return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, suppliers); } - public void testDateDiffFunction() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:15:01Z"); - long startTimestamp = zdtStart.toInstant().toEpochMilli(); - long endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - assertEquals(1000000000, DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp)); - assertEquals(1000000000, DateDiff.process(new BytesRef("ns"), startTimestamp, endTimestamp)); - assertEquals(1000000, DateDiff.process(new BytesRef("microseconds"), startTimestamp, endTimestamp)); - assertEquals(1000000, DateDiff.process(new BytesRef("mcs"), startTimestamp, endTimestamp)); - assertEquals(1000, DateDiff.process(new BytesRef("milliseconds"), startTimestamp, endTimestamp)); - assertEquals(1000, DateDiff.process(new BytesRef("ms"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("seconds"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("ss"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("s"), startTimestamp, endTimestamp)); - - zdtEnd = zdtEnd.plusYears(1); - endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - assertEquals(527040, DateDiff.process(new BytesRef("minutes"), startTimestamp, endTimestamp)); - 
assertEquals(527040, DateDiff.process(new BytesRef("mi"), startTimestamp, endTimestamp)); - assertEquals(527040, DateDiff.process(new BytesRef("n"), startTimestamp, endTimestamp)); - assertEquals(8784, DateDiff.process(new BytesRef("hours"), startTimestamp, endTimestamp)); - assertEquals(8784, DateDiff.process(new BytesRef("hh"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("weekdays"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dw"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("weeks"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("wk"), startTimestamp, endTimestamp)); - assertEquals(52, DateDiff.process(new BytesRef("ww"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("days"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dd"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("d"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("dy"), startTimestamp, endTimestamp)); - assertEquals(366, DateDiff.process(new BytesRef("y"), startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("months"), startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("mm"), startTimestamp, endTimestamp)); - assertEquals(12, DateDiff.process(new BytesRef("m"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("quarters"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("qq"), startTimestamp, endTimestamp)); - assertEquals(4, DateDiff.process(new BytesRef("q"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("years"), startTimestamp, endTimestamp)); - assertEquals(1, DateDiff.process(new BytesRef("yyyy"), startTimestamp, endTimestamp)); - assertEquals(1, 
DateDiff.process(new BytesRef("yy"), startTimestamp, endTimestamp)); - } - - public void testDateDiffFunctionErrorTooLarge() { - ZonedDateTime zdtStart = ZonedDateTime.parse("2023-12-04T10:15:00Z"); - ZonedDateTime zdtEnd = ZonedDateTime.parse("2023-12-04T10:20:00Z"); - long startTimestamp = zdtStart.toInstant().toEpochMilli(); - long endTimestamp = zdtEnd.toInstant().toEpochMilli(); - - InvalidArgumentException e = expectThrows( - InvalidArgumentException.class, - () -> DateDiff.process(new BytesRef("nanoseconds"), startTimestamp, endTimestamp) - ); - assertThat(e.getMessage(), containsString("[300000000000] out of [integer] range")); - } - - public void testDateDiffFunctionErrorUnitNotValid() { - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("sseconds"), 0, 0)); - assertThat( - e.getMessage(), - containsString( - "Received value [sseconds] is not valid date part to add; " - + "did you mean [seconds, second, nanoseconds, milliseconds, microseconds, nanosecond]?" 
- ) - ); - - e = expectThrows(IllegalArgumentException.class, () -> DateDiff.process(new BytesRef("not-valid-unit"), 0, 0)); - assertThat( - e.getMessage(), - containsString( - "A value of [YEAR, QUARTER, MONTH, DAYOFYEAR, DAY, WEEK, WEEKDAY, HOUR, MINUTE, SECOND, MILLISECOND, MICROSECOND, " - + "NANOSECOND] or their aliases is required; received [not-valid-unit]" + private static List makeSuppliers(Instant startTimestamp, Instant endTimestamp, String unit, String warning) { + // Units as Keyword case + return List.of( + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") -> warning ", + List.of(DataType.KEYWORD, DataType.DATETIME, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.KEYWORD, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(null) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") + .withWarning(warning) + ), + // Units as text case + new TestCaseSupplier( + "DateDiff(" + unit + ", " + startTimestamp + ", " + endTimestamp + ") -> warning ", + List.of(DataType.TEXT, DataType.DATETIME, DataType.DATETIME), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BytesRef(unit), DataType.TEXT, "unit"), + new TestCaseSupplier.TypedData(startTimestamp.toEpochMilli(), DataType.DATETIME, "startTimestamp"), + new TestCaseSupplier.TypedData(endTimestamp.toEpochMilli(), DataType.DATETIME, "endTimestamp") + ), + "DateDiffEvaluator[unit=Attribute[channel=0], startTimestamp=Attribute[channel=1], " + + "endTimestamp=Attribute[channel=2]]", + DataType.INTEGER, + equalTo(null) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning(warning) ) ); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java new file mode 100644 index 0000000000000..a2a97e11bfc0f --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/LogWithBaseInMapSerializationTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; + +import java.io.IOException; + +public class LogWithBaseInMapSerializationTests extends AbstractExpressionSerializationTests { + @Override + protected LogWithBaseInMap createTestInstance() { + Source source = randomSource(); + Expression number = randomChild(); + Expression base = randomBoolean() ? null : randomChild(); + return new LogWithBaseInMap(source, number, base); + } + + @Override + protected LogWithBaseInMap mutateInstance(LogWithBaseInMap instance) throws IOException { + Source source = instance.source(); + Expression number = instance.number(); + Expression base = instance.base(); + if (randomBoolean()) { + number = randomValueOtherThan(number, AbstractExpressionSerializationTests::randomChild); + } else { + base = randomValueOtherThan(base, () -> randomBoolean() ? null : randomChild()); + } + return new LogWithBaseInMap(source, number, base); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundErrorTests.java new file mode 100644 index 0000000000000..54020317bbcfd --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundErrorTests.java @@ -0,0 +1,44 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.math; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class RoundErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(RoundTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new Round(source, args.get(0), args.size() == 1 ? null : args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + typeErrorMessage( + true, + validPerPosition, + signature, + (v, p) -> p == 0 ? 
"numeric" : "whole number except unsigned_long or counter types" + ) + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java index c05388a9708da..e7a8d2d7ef9d4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/math/RoundTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.function.BiFunction; @@ -26,8 +27,6 @@ import java.util.function.Supplier; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; -import static org.hamcrest.Matchers.nullValue; public class RoundTests extends AbstractScalarFunctionTestCase { public RoundTests(@Name("TestCase") Supplier testCaseSupplier) { @@ -37,11 +36,13 @@ public RoundTests(@Name("TestCase") Supplier testCase @ParametersFactory public static Iterable parameters() { List suppliers = new ArrayList<>(); + + // Double field suppliers.add( supplier( "", DataType.DOUBLE, - () -> 1 / randomDouble(), + () -> randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true), "RoundDoubleNoDecimalsEvaluator[val=Attribute[channel=0]]", d -> Maths.round(d, 0) ) @@ -50,36 +51,252 @@ public static Iterable parameters() { supplier( ", ", DataType.DOUBLE, - () -> 1 / randomDouble(), + () -> randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true), DataType.INTEGER, () -> between(-30, 30), "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", Maths::round ) ); - // TODO 
randomized cases for more types - // TODO errorsForCasesWithoutExamples - suppliers = anyNullIsNull( - suppliers, - (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? nullValueDataType : original.expectedType(), - (nullPosition, nullData, original) -> original + suppliers.add( + supplier( + ", ", + DataType.DOUBLE, + () -> randomDoubleBetween(-Double.MAX_VALUE, Double.MAX_VALUE, true), + DataType.LONG, + () -> randomLongBetween(-30, 30), + "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + Maths::round + ) ); - suppliers.add(new TestCaseSupplier("two doubles", List.of(DataType.DOUBLE, DataType.INTEGER), () -> { - double number1 = 1 / randomDouble(); - double number2 = 1 / randomDouble(); - int precision = between(-30, 30); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(List.of(number1, number2), DataType.DOUBLE, "number"), - new TestCaseSupplier.TypedData(precision, DataType.INTEGER, "decimals") - ), - "RoundDoubleEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", - DataType.DOUBLE, - is(nullValue()) - ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") - .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); - })); + // Long decimals + suppliers.add( + supplier( + ", ", + DataType.INTEGER, + ESTestCase::randomInt, + DataType.LONG, + ESTestCase::randomLong, + "RoundIntEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + (n, d) -> Maths.round((Number) n, d) + ) + ); + suppliers.add( + supplier( + ", ", + DataType.LONG, + ESTestCase::randomLong, + DataType.LONG, + ESTestCase::randomLong, + "RoundLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + (n, d) -> Maths.round((Number) n, d) + ) + ); + suppliers.add( + supplier( + ", ", + DataType.UNSIGNED_LONG, + ESTestCase::randomLong, + DataType.LONG, + // Safe negative integer to not trigger an exception and not slow down the test + () -> randomLongBetween(-10_000, Long.MAX_VALUE), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + (n, d) -> Maths.round(NumericUtils.unsignedLongAsBigInteger(n), d) + ) + ); + + // Integer decimals + suppliers.add( + supplier( + ", ", + DataType.INTEGER, + ESTestCase::randomInt, + DataType.INTEGER, + ESTestCase::randomInt, + "RoundIntEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", + (n, d) -> Maths.round((Number) n, d) + ) + ); + suppliers.add( + supplier( + ", ", + DataType.LONG, + ESTestCase::randomLong, + DataType.INTEGER, + ESTestCase::randomInt, + "RoundLongEvaluator[val=Attribute[channel=0], decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", + (n, d) -> Maths.round((Number) n, d) + ) + ); + suppliers.add( + supplier( + ", ", + DataType.UNSIGNED_LONG, + ESTestCase::randomLong, + DataType.INTEGER, + // Safe negative integer to not trigger an exception and not slow down the test + () -> randomIntBetween(-10_000, Integer.MAX_VALUE), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], 
decimals=CastIntToLongEvaluator[v=Attribute[channel=1]]]", + (n, d) -> Maths.round(NumericUtils.unsignedLongAsBigInteger(n), d) + ) + ); + + // Unsigned long errors + suppliers.add( + new TestCaseSupplier( + ", ", + List.of(DataType.UNSIGNED_LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BigInteger("18446744073709551615"), DataType.UNSIGNED_LONG, "number"), + new TestCaseSupplier.TypedData(-9223372036854775808L, DataType.LONG, "decimals") + ), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(BigInteger.ZERO) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", ", + List.of(DataType.UNSIGNED_LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BigInteger("18446744073709551615"), DataType.UNSIGNED_LONG, "number"), + new TestCaseSupplier.TypedData(-2147483647L, DataType.LONG, "decimals") + ), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(BigInteger.ZERO) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-20>", + List.of(DataType.UNSIGNED_LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BigInteger("18446744073709551615"), DataType.UNSIGNED_LONG, "number"), + new TestCaseSupplier.TypedData(-20L, DataType.LONG, "decimals") + ), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(BigInteger.ZERO) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-19>", + List.of(DataType.UNSIGNED_LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BigInteger("18446744073709551615"), DataType.UNSIGNED_LONG, "number"), + new TestCaseSupplier.TypedData(-19L, DataType.LONG, "decimals") + ), + 
"RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(null) + ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.") + .withWarning("Line -1:-1: java.lang.ArithmeticException: unsigned_long overflow") + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-19>", + List.of(DataType.UNSIGNED_LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(new BigInteger("14446744073709551615"), DataType.UNSIGNED_LONG, "number"), + new TestCaseSupplier.TypedData(-19L, DataType.LONG, "decimals") + ), + "RoundUnsignedLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(new BigInteger("10000000000000000000")) + ) + ) + ); + + // Max longs and overflows + suppliers.add( + new TestCaseSupplier( + ", <-20>", + List.of(DataType.LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Long.MAX_VALUE, DataType.LONG, "number"), + new TestCaseSupplier.TypedData(-20L, DataType.LONG, "decimals") + ), + "RoundLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.LONG, + equalTo(0L) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-19>", + List.of(DataType.LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Long.MAX_VALUE, DataType.LONG, "number"), + new TestCaseSupplier.TypedData(-19L, DataType.LONG, "decimals") + ), + "RoundLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.LONG, + equalTo(0L) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-18>", + List.of(DataType.LONG, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Long.MAX_VALUE, DataType.LONG, "number"), + new TestCaseSupplier.TypedData(-18L, DataType.LONG, "decimals") + 
), + "RoundLongEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.LONG, + equalTo(9000000000000000000L) + ) + ) + ); + // Max integers and overflows + suppliers.add( + new TestCaseSupplier( + ", <-10>", + List.of(DataType.INTEGER, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Integer.MAX_VALUE, DataType.INTEGER, "number"), + new TestCaseSupplier.TypedData(-10L, DataType.LONG, "decimals") + ), + "RoundIntEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.INTEGER, + equalTo(0) + ) + ) + ); + suppliers.add( + new TestCaseSupplier( + ", <-9>", + List.of(DataType.INTEGER, DataType.LONG), + () -> new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(Integer.MAX_VALUE, DataType.INTEGER, "number"), + new TestCaseSupplier.TypedData(-9L, DataType.LONG, "decimals") + ), + "RoundIntEvaluator[val=Attribute[channel=0], decimals=Attribute[channel=1]]", + DataType.INTEGER, + equalTo(2000000000) + ) + ) + ); // Integer or Long without a decimals parameter is a noop suppliers.add(supplier("", DataType.INTEGER, ESTestCase::randomInt, "Attribute[channel=0]", Function.identity())); @@ -128,7 +345,12 @@ public static Iterable parameters() { suppliers.add(supplier(0, 0, 0)); suppliers.add(supplier(123, 2, 123)); suppliers.add(supplier(123, -1, 120)); - return parameterSuppliersFromTypedData(suppliers); + + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors( + (nullPosition, nullValueDataType, original) -> nullPosition == 0 ? 
nullValueDataType : original.expectedType(), + (nullPosition, nullData, original) -> original, + suppliers + ); } private static TestCaseSupplier supplier(double v, double expected) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendErrorTests.java new file mode 100644 index 0000000000000..df9ab4764c879 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendErrorTests.java @@ -0,0 +1,45 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvAppendErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvAppendTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvAppend(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo( + "second argument of [" + + sourceForSignature(signature) + + "] must be [" + + 
signature.get(0).noText().typeName() + + "], found value [] type [" + + signature.get(1).typeName() + + "]" + ); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java index 33733d5e70c61..ca0b997fe0a4f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAppendTests.java @@ -13,15 +13,18 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.geo.GeometryTestUtils; import org.elasticsearch.geo.ShapeTestUtils; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; +import java.util.stream.Stream; import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.elasticsearch.xpack.esql.core.util.SpatialCoordinateTypes.CARTESIAN; @@ -41,8 +44,7 @@ public static Iterable parameters() { longs(suppliers); doubles(suppliers); bytesRefs(suppliers); - nulls(suppliers); - return parameterSuppliersFromTypedData(suppliers); + return parameterSuppliersFromTypedData(anyNullIsNull(true, suppliers)); } @Override @@ -102,7 +104,20 @@ private static void longs(List suppliers) { equalTo(result) ); })); - + suppliers.add(new TestCaseSupplier(List.of(DataType.UNSIGNED_LONG, DataType.UNSIGNED_LONG), () 
-> { + List field1 = randomList(1, 10, ESTestCase::randomLong); + List field2 = randomList(1, 10, ESTestCase::randomLong); + var result = Stream.concat(field1.stream(), field2.stream()).map(NumericUtils::unsignedLongAsBigInteger).toList(); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.UNSIGNED_LONG, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.UNSIGNED_LONG, "field2") + ), + "MvAppendLongEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.UNSIGNED_LONG, + equalTo(result) + ); + })); suppliers.add(new TestCaseSupplier(List.of(DataType.DATETIME, DataType.DATETIME), () -> { List field1 = randomList(1, 10, () -> randomLong()); List field2 = randomList(1, 10, () -> randomLong()); @@ -118,6 +133,21 @@ private static void longs(List suppliers) { equalTo(result) ); })); + suppliers.add(new TestCaseSupplier(List.of(DataType.DATE_NANOS, DataType.DATE_NANOS), () -> { + List field1 = randomList(1, 10, () -> randomNonNegativeLong()); + List field2 = randomList(1, 10, () -> randomNonNegativeLong()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, DataType.DATE_NANOS, "field1"), + new TestCaseSupplier.TypedData(field2, DataType.DATE_NANOS, "field2") + ), + "MvAppendLongEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.DATE_NANOS, + equalTo(result) + ); + })); } private static void doubles(List suppliers) { @@ -139,54 +169,25 @@ private static void doubles(List suppliers) { } private static void bytesRefs(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataType.KEYWORD, DataType.KEYWORD), () -> { - List field1 = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); - List field2 = randomList(1, 10, () -> randomLiteral(DataType.KEYWORD).value()); - var result = new ArrayList<>(field1); - result.addAll(field2); - return 
new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field1, DataType.KEYWORD, "field1"), - new TestCaseSupplier.TypedData(field2, DataType.KEYWORD, "field2") - ), - "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", - DataType.KEYWORD, - equalTo(result) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.TEXT, DataType.TEXT), () -> { - List field1 = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); - List field2 = randomList(1, 10, () -> randomLiteral(DataType.TEXT).value()); - var result = new ArrayList<>(field1); - result.addAll(field2); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field1, DataType.TEXT, "field1"), - new TestCaseSupplier.TypedData(field2, DataType.TEXT, "field2") - ), - "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", - DataType.TEXT, - equalTo(result) - ); - })); - - suppliers.add(new TestCaseSupplier(List.of(DataType.SEMANTIC_TEXT, DataType.SEMANTIC_TEXT), () -> { - List field1 = randomList(1, 10, () -> randomLiteral(DataType.SEMANTIC_TEXT).value()); - List field2 = randomList(1, 10, () -> randomLiteral(DataType.SEMANTIC_TEXT).value()); - var result = new ArrayList<>(field1); - result.addAll(field2); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field1, DataType.SEMANTIC_TEXT, "field1"), - new TestCaseSupplier.TypedData(field2, DataType.SEMANTIC_TEXT, "field2") - ), - "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", - DataType.SEMANTIC_TEXT, - equalTo(result) - ); - })); - + for (DataType lhs : new DataType[] { DataType.KEYWORD, DataType.TEXT, DataType.SEMANTIC_TEXT }) { + for (DataType rhs : new DataType[] { DataType.KEYWORD, DataType.TEXT, DataType.SEMANTIC_TEXT }) { + suppliers.add(new TestCaseSupplier(List.of(lhs, rhs), () -> { + List field1 = randomList(1, 10, () -> randomLiteral(lhs).value()); + List 
field2 = randomList(1, 10, () -> randomLiteral(rhs).value()); + var result = new ArrayList<>(field1); + result.addAll(field2); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field1, lhs, "field1"), + new TestCaseSupplier.TypedData(field2, rhs, "field2") + ), + "MvAppendBytesRefEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", + DataType.KEYWORD, + equalTo(result) + ); + })); + } + } suppliers.add(new TestCaseSupplier(List.of(DataType.IP, DataType.IP), () -> { List field1 = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); List field2 = randomList(1, 10, () -> randomLiteral(DataType.IP).value()); @@ -283,31 +284,4 @@ private static void bytesRefs(List suppliers) { ); })); } - - private static void nulls(List suppliers) { - suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER), () -> { - List field2 = randomList(2, 10, () -> randomInt()); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(null, DataType.INTEGER, "field1"), - new TestCaseSupplier.TypedData(field2, DataType.INTEGER, "field2") - ), - "MvAppendIntEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", - DataType.INTEGER, - equalTo(null) - ); - })); - suppliers.add(new TestCaseSupplier(List.of(DataType.INTEGER, DataType.INTEGER), () -> { - List field1 = randomList(2, 10, () -> randomInt()); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(field1, DataType.INTEGER, "field1"), - new TestCaseSupplier.TypedData(null, DataType.INTEGER, "field2") - ), - "MvAppendIntEvaluator[field1=Attribute[channel=0], field2=Attribute[channel=1]]", - DataType.INTEGER, - equalTo(null) - ); - })); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgErrorTests.java 
new file mode 100644 index 0000000000000..9a9a0796aadcf --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvAvgErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvAvgTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvAvg(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java index af046a5f39d81..702e48c44fa6a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvAvgTests.java @@ -55,7 +55,7 @@ public static Iterable parameters() { */ (size, data) -> avg.apply(size, data.mapToDouble(v -> unsignedLongToDouble(NumericUtils.asLongUnsigned(v)))) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatErrorTests.java new file mode 100644 index 0000000000000..38022c2c08be6 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvConcatErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvConcatTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvConcat(source, args.get(0), args.get(1)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java index 4467b49cd674a..1fd33c2403ca6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvConcatTests.java @@ -67,7 +67,7 @@ public static Iterable parameters() { } } } - return parameterSuppliersFromTypedDataWithDefaultChecks(false, suppliers, (v, p) -> "string"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, suppliers); } @Override diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountErrorTests.java new file mode 100644 index 0000000000000..d59a1aa2eb098 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountErrorTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvCountErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvCountTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvCount(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> { + /* + * In general MvCount should support all signatures. While building a + * new type you may we to temporarily remove this. 
+ */ + throw new UnsupportedOperationException("all signatures should be supported"); + })); + } + + @Override + protected void assertNumberOfCheckedSignatures(int checked) { + /* + * In general MvCount should support all signatures. While building a + * new type you may we to temporarily relax this. + */ + assertThat("all signatures should be supported", checked, equalTo(0)); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java index 51b15ead26c56..6aeab0339c172 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCountTests.java @@ -41,7 +41,7 @@ public static Iterable parameters() { cartesianPoints(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); geoShape(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); cartesianShape(cases, "mv_count", "MvCount", DataType.INTEGER, (size, values) -> equalTo(Math.toIntExact(values.count()))); - return parameterSuppliersFromTypedDataWithDefaultChecks(true, cases, (v, p) -> ""); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(true, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeErrorTests.java new file mode 100644 index 0000000000000..55f34d9a72f41 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeErrorTests.java @@ 
-0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvDedupeErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvDedupeTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvDedupe(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> { + /* + * In general MvDedupe should support all signatures. While building a + * new type you may we to temporarily remove this. + */ + throw new UnsupportedOperationException("all signatures should be supported"); + })); + } + + @Override + protected void assertNumberOfCheckedSignatures(int checked) { + /* + * In general MvDedupe should support all signatures. While building a + * new type you may we to temporarily relax this. 
+ */ + assertThat("all signatures should be supported", checked, equalTo(0)); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java index f3b44274f3ade..24fd0a349796c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvDedupeTests.java @@ -47,9 +47,7 @@ public static Iterable parameters() { cartesianShape(cases, "mv_dedupe", "MvDedupe", DataType.CARTESIAN_SHAPE, (size, values) -> getMatcher(values)); geoPoints(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); geoShape(cases, "mv_dedupe", "MvDedupe", DataType.GEO_SHAPE, (size, values) -> getMatcher(values)); - - // TODO switch extraction to BigInteger so this just works. - // unsignedLongs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); + unsignedLongs(cases, "mv_dedupe", "MvDedupe", (size, values) -> getMatcher(values)); return parameterSuppliersFromTypedData(anyNullIsNull(false, cases)); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstErrorTests.java new file mode 100644 index 0000000000000..7ca829a7629c5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstErrorTests.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvFirstErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvFirstTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvFirst(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> { + /* + * In general MvFirst should support all signatures. While building a + * new type you may we to temporarily remove this. + */ + throw new UnsupportedOperationException("all signatures should be supported"); + })); + } + + @Override + protected void assertNumberOfCheckedSignatures(int checked) { + /* + * In general MvFirst should support all signatures. While building a + * new type you may we to temporarily relax this. 
+ */ + assertThat("all signatures should be supported", checked, equalTo(0)); + } + +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java index f6ef06a84ac2d..3ee98364141c6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvFirstTests.java @@ -42,16 +42,11 @@ public static Iterable parameters() { cartesianPoints(cases, "mv_first", "MvFirst", DataType.CARTESIAN_POINT, (size, values) -> equalTo(values.findFirst().get())); geoShape(cases, "mv_first", "MvFirst", DataType.GEO_SHAPE, (size, values) -> equalTo(values.findFirst().get())); cartesianShape(cases, "mv_first", "MvFirst", DataType.CARTESIAN_SHAPE, (size, values) -> equalTo(values.findFirst().get())); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> ""); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } @Override protected Expression build(Source source, Expression field) { return new MvFirst(source, field); } - - @Override - protected DataType expectedType(List argTypes) { - return argTypes.get(0); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastErrorTests.java new file mode 100644 index 0000000000000..3db13f0368a88 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastErrorTests.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvLastErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvLastTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvLast(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> { + /* + * In general MvLast should support all signatures. While building a + * new type you may we to temporarily remove this. + */ + throw new UnsupportedOperationException("all signatures should be supported"); + })); + } + + @Override + protected void assertNumberOfCheckedSignatures(int checked) { + /* + * In general MvLast should support all signatures. While building a + * new type you may we to temporarily relax this. 
+ */ + assertThat("all signatures should be supported", checked, equalTo(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java index 09e483c3a43ee..a7a13360ce443 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvLastTests.java @@ -42,16 +42,11 @@ public static Iterable parameters() { cartesianPoints(cases, "mv_last", "MvLast", DataType.CARTESIAN_POINT, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); geoShape(cases, "mv_last", "MvLast", DataType.GEO_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); cartesianShape(cases, "mv_last", "MvLast", DataType.CARTESIAN_SHAPE, (size, values) -> equalTo(values.reduce((f, s) -> s).get())); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "representable"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } @Override protected Expression build(Source source, Expression field) { return new MvLast(source, field); } - - @Override - protected DataType expectedType(List argTypes) { - return argTypes.get(0); - } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxErrorTests.java new file mode 100644 index 0000000000000..d406b5157a4b5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvMaxErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvMaxTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvMax(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representableNonSpatial")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java index a3ad1f2415e20..4e4a615e9f5a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMaxTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { unsignedLongs(cases, "mv_max", "MvMax", (size, values) -> equalTo(values.reduce(BigInteger::max).get())); dateTimes(cases, "mv_max", "MvMax", (size, values) -> 
equalTo(values.max().getAsLong())); dateNanos(cases, "mv_max", "MvMax", DataType.DATE_NANOS, (size, values) -> equalTo(values.max().getAsLong())); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "representableNonSpatial"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationErrorTests.java new file mode 100644 index 0000000000000..a6bced5df46f2 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvMedianAbsoluteDeviationErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvMedianAbsoluteDeviationTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvMedianAbsoluteDeviation(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationTests.java index b041faf6510a1..3e4c8296497d5 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianAbsoluteDeviationTests.java @@ -122,7 +122,7 @@ public static Iterable parameters() { ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } /** diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianErrorTests.java new file mode 100644 index 0000000000000..734a240ebe6d3 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvMedianErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvMedianTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvMedian(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "numeric")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java index 002aa77946bcf..c1435136eed8b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMedianTests.java @@ -92,7 +92,7 @@ public static Iterable parameters() { ) ) ); - return parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "numeric"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinErrorTests.java new file mode 100644 index 0000000000000..6155c3f987f06 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvMinErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvMinTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvMin(source, args.get(0)); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "representableNonSpatial")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java index a4d5a4004b840..f958112b93597 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvMinTests.java @@ -39,7 +39,7 @@ public static Iterable parameters() { unsignedLongs(cases, "mv_min", "MvMin", (size, values) -> equalTo(values.reduce(BigInteger::min).get())); dateTimes(cases, "mv_min", "MvMin", (size, values) -> equalTo(values.min().getAsLong())); dateNanos(cases, "mv_min", "MvMin", DataType.DATE_NANOS, (size, values) -> equalTo(values.min().getAsLong())); - return 
parameterSuppliersFromTypedDataWithDefaultChecks(false, cases, (v, p) -> "representableNonSpatial"); + return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(false, cases); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileSimpleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileSimpleTests.java index 81ae8efb7aba7..55c3ab7261528 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileSimpleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvPercentileSimpleTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.TestBlockFactory; import static org.hamcrest.Matchers.equalTo; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceErrorTests.java new file mode 100644 index 0000000000000..83d0e4fcf3d75 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceErrorTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvSliceErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvSliceTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvSlice(source, args.get(0), args.get(1), args.size() > 2 ? args.get(2) : null); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> switch (p) { + case 1, 2 -> "integer"; + default -> throw new UnsupportedOperationException(); + })); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java index d5284602bf40c..24da717630733 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSliceTests.java @@ -16,9 +16,11 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.core.util.NumericUtils; 
import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import java.math.BigInteger; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -199,6 +201,24 @@ private static void longs(List suppliers) { equalTo(start == end ? field.get(start) : field.subList(start, end + 1)) ); })); + + suppliers.add(new TestCaseSupplier(List.of(DataType.UNSIGNED_LONG, DataType.INTEGER, DataType.INTEGER), () -> { + List field = randomList(1, 10, () -> randomNonNegativeLong()); + List result = field.stream().map(NumericUtils::unsignedLongAsBigInteger).toList(); + int length = field.size(); + int start = randomIntBetween(0, length - 1); + int end = randomIntBetween(start, length - 1); + return new TestCaseSupplier.TestCase( + List.of( + new TestCaseSupplier.TypedData(field, DataType.UNSIGNED_LONG, "field"), + new TestCaseSupplier.TypedData(start, DataType.INTEGER, "start"), + new TestCaseSupplier.TypedData(end, DataType.INTEGER, "end") + ), + "MvSliceLongEvaluator[field=Attribute[channel=0], start=Attribute[channel=1], end=Attribute[channel=2]]", + DataType.UNSIGNED_LONG, + equalTo(start == end ? result.get(start) : result.subList(start, end + 1)) + ); + })); } private static void doubles(List suppliers) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipErrorTests.java new file mode 100644 index 0000000000000..1e03be66d579c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipErrorTests.java @@ -0,0 +1,37 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.multivalue; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class MvZipErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(MvZipTests.parameters()); + } + + @Override + protected Expression build(Source source, List args) { + return new MvZip(source, args.get(0), args.get(1), args.size() > 2 ? 
args.get(2) : null); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(true, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java index d415cb55ea632..ae2afe0e3145e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvZipTests.java @@ -52,7 +52,7 @@ public static Iterable parameters() { } } - return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(suppliers, (v, p) -> "string")); + return parameterSuppliersFromTypedData(suppliers); } private static TestCaseSupplier supplier(DataType leftType, DataType rightType, DataType delimType) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java index bd309e4a893dc..a4b42a2145327 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullSerializationTests.java @@ -8,9 +8,9 @@ package org.elasticsearch.xpack.esql.expression.function.scalar.nulls; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.Source; import 
org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java index 1ff9358b507c3..0af5dc7d0ebeb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNotNullTests.java @@ -11,11 +11,11 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.hamcrest.Matcher; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java index 60bf3085c6d13..6202db6db185d 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullSerializationTests.java @@ -8,9 +8,9 @@ package 
org.elasticsearch.xpack.esql.expression.function.scalar.nulls; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.expression.AbstractExpressionSerializationTests; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import java.io.IOException; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java index e80480a636a19..4fdc805b18792 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/nulls/IsNullTests.java @@ -11,11 +11,11 @@ import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; import org.elasticsearch.xpack.esql.core.expression.Expression; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.AbstractScalarFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.hamcrest.Matcher; import java.util.ArrayList; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatErrorTests.java new file mode 100644 index 0000000000000..e7afced133c95 --- /dev/null +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatErrorTests.java @@ -0,0 +1,40 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; + +import static org.hamcrest.Matchers.equalTo; + +public class ConcatErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + List suppliers = paramsToSuppliers(ConcatTests.parameters()); + // TODO support longer lists. Though this thing has 100s so we probably can't do them all. 
+ suppliers.removeIf(s -> s.types().size() > 3); + return suppliers; + } + + @Override + protected Expression build(Source source, List args) { + return new Concat(source, args.get(0), args.subList(1, args.size())); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java index 42c6284a3c25a..c7358ff4fe947 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/ConcatTests.java @@ -26,7 +26,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.Set; import java.util.function.Supplier; import java.util.stream.IntStream; @@ -48,23 +47,6 @@ public static Iterable parameters() { for (int length = 4; length < 100; length++) { suppliers(suppliers, length); } - Set supported = Set.of(DataType.NULL, DataType.KEYWORD, DataType.TEXT, DataType.SEMANTIC_TEXT); - List> supportedPerPosition = List.of(supported, supported); - for (DataType lhs : DataType.types()) { - if (lhs == DataType.NULL || DataType.isRepresentable(lhs) == false) { - continue; - } - for (DataType rhs : DataType.types()) { - if (rhs == DataType.NULL || DataType.isRepresentable(rhs) == false) { - continue; - } - if (DataType.isString(lhs) && DataType.isString(rhs)) { - continue; - } - - suppliers.add(typeErrorSupplier(false, supportedPerPosition, List.of(lhs, rhs), (v, p) -> "string")); - } - } return parameterSuppliersFromTypedData(suppliers); } @@ -133,7 +115,6 @@ private static void add(List suppliers, String name, int 
lengt return new TestCaseSupplier.TestCase(values, expectedToString, DataType.KEYWORD, equalTo(new BytesRef(expectedValue))); })); } - } @Override @@ -159,11 +140,6 @@ public void testSomeConstant() { fieldValues.add(new BytesRef("dummy")); } Expression expression = build(testCase.getSource(), mix); - if (testCase.getExpectedTypeError() != null) { - assertTrue("expected unresolved", expression.typeResolved().unresolved()); - assertThat(expression.typeResolved().message(), equalTo(testCase.getExpectedTypeError())); - return; - } int totalLength = testDataLength(); if (totalLength >= Concat.MAX_CONCAT_LENGTH || rarely()) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeErrorTests.java new file mode 100644 index 0000000000000..c73e3da3997f5 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeErrorTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class RLikeErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(RLikeTests.parameters()); + } + + @Override + protected Stream> testCandidates(List cases, Set> valid) { + /* + * We can't support certain signatures, and it's safe not to test them because + * you can't even build them.... The building comes directly from the parser + * and can only make certain types. 
+ */ + return super.testCandidates(cases, valid).filter(sig -> sig.get(1) == DataType.KEYWORD) + .filter(sig -> sig.size() > 2 && sig.get(2) == DataType.BOOLEAN); + } + + @Override + protected Expression build(Source source, List args) { + return RLikeTests.buildRLike(logger, source, args); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java index 6c41552a9fc52..589477a8bebdc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RLikeTests.java @@ -10,6 +10,7 @@ import com.carrotsearch.randomizedtesting.annotations.Name; import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; +import org.apache.logging.log4j.Logger; import org.apache.lucene.util.BytesRef; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; @@ -25,7 +26,6 @@ import java.util.function.Function; import java.util.function.Supplier; -import static org.elasticsearch.xpack.esql.EsqlTestUtils.randomLiteral; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; import static org.hamcrest.Matchers.startsWith; @@ -69,28 +69,6 @@ static Iterable parameters(Function escapeString, Supp casesForString(cases, "3 bytes, 1 code point", () -> "☕", false, escapeString, optionalPattern); casesForString(cases, "6 bytes, 2 code points", () -> "❗️", false, escapeString, optionalPattern); casesForString(cases, "100 random code points", () 
-> randomUnicodeOfCodepointLength(100), true, escapeString, optionalPattern); - for (DataType type : DataType.types()) { - if (DataType.isString(type) || type == DataType.NULL) { - continue; - } - if (DataType.isRepresentable(type) == false) { - continue; - } - cases.add( - new TestCaseSupplier( - List.of(type, DataType.KEYWORD, DataType.BOOLEAN), - () -> TestCaseSupplier.TestCase.typeError( - List.of( - new TestCaseSupplier.TypedData(randomLiteral(type).value(), type, "e"), - new TestCaseSupplier.TypedData(new BytesRef(randomAlphaOfLength(10)), DataType.KEYWORD, "pattern") - .forceLiteral(), - new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "caseInsensitive").forceLiteral() - ), - "argument of [] must be [string], found value [e] type [" + type.typeName() + "]" - ) - ) - ); - } return parameterSuppliersFromTypedData(cases); } @@ -127,12 +105,12 @@ private static void casesForString( private static void cases(List cases, String title, Supplier textAndPattern, boolean expected) { for (DataType type : DataType.stringTypes()) { - cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, type, DataType.BOOLEAN), () -> { + cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, DataType.KEYWORD, DataType.BOOLEAN), () -> { TextAndPattern v = textAndPattern.get(); return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(new BytesRef(v.text), type, "e"), - new TestCaseSupplier.TypedData(new BytesRef(v.pattern), type, "pattern").forceLiteral(), + new TestCaseSupplier.TypedData(new BytesRef(v.pattern), DataType.KEYWORD, "pattern").forceLiteral(), new TestCaseSupplier.TypedData(false, DataType.BOOLEAN, "caseInsensitive").forceLiteral() ), startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), @@ -140,12 +118,12 @@ private static void cases(List cases, String title, Supplier { + cases.add(new TestCaseSupplier(title + " with " + type.esType(), List.of(type, 
DataType.KEYWORD), () -> { TextAndPattern v = textAndPattern.get(); return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(new BytesRef(v.text), type, "e"), - new TestCaseSupplier.TypedData(new BytesRef(v.pattern), type, "pattern").forceLiteral() + new TestCaseSupplier.TypedData(new BytesRef(v.pattern), DataType.KEYWORD, "pattern").forceLiteral() ), startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph Automaton {\n"), DataType.BOOLEAN, @@ -157,6 +135,10 @@ private static void cases(List cases, String title, Supplier args) { + return buildRLike(logger, source, args); + } + + static Expression buildRLike(Logger logger, Source source, List args) { Expression expression = args.get(0); Literal pattern = (Literal) args.get(1); Literal caseInsensitive = args.size() > 2 ? (Literal) args.get(2) : null; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java index 7c8426a5fe3fc..33a490fbde3be 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/RepeatStaticTests.java @@ -18,8 +18,8 @@ import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeErrorTests.java new file mode 100644 index 0000000000000..d6f4fdc699202 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeErrorTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.string; + +import org.elasticsearch.xpack.esql.core.expression.Expression; +import org.elasticsearch.xpack.esql.core.tree.Source; +import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.function.ErrorsForCasesWithoutExamplesTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.hamcrest.Matcher; + +import java.util.List; +import java.util.Set; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; + +public class WildcardLikeErrorTests extends ErrorsForCasesWithoutExamplesTestCase { + @Override + protected List cases() { + return paramsToSuppliers(WildcardLikeTests.parameters()); + } + + @Override + protected Stream> testCandidates(List cases, Set> valid) { + /* + * We can't support certain signatures, and it's safe not to test them because + * you can't even build them.... The building comes directly from the parser + * and can only make certain types. 
+ */ + return super.testCandidates(cases, valid).filter(sig -> sig.get(1) == DataType.KEYWORD) + .filter(sig -> sig.size() > 2 && sig.get(2) == DataType.BOOLEAN); + } + + @Override + protected Expression build(Source source, List args) { + return RLikeTests.buildRLike(logger, source, args); + } + + @Override + protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "string")); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java index 6626ac50d60b5..e60c5f77ab42e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/WildcardLikeTests.java @@ -55,7 +55,7 @@ public static Iterable parameters() { private static void addCases(List suppliers) { for (DataType type : new DataType[] { DataType.KEYWORD, DataType.TEXT, DataType.SEMANTIC_TEXT }) { - suppliers.add(new TestCaseSupplier(" with " + type.esType(), List.of(type, type), () -> { + suppliers.add(new TestCaseSupplier(" with " + type.esType(), List.of(type, DataType.KEYWORD), () -> { BytesRef str = new BytesRef(randomAlphaOfLength(5)); String patternString = randomAlphaOfLength(2); BytesRef pattern = new BytesRef(patternString + "*"); @@ -63,7 +63,7 @@ private static void addCases(List suppliers) { return new TestCaseSupplier.TestCase( List.of( new TestCaseSupplier.TypedData(str, type, "str"), - new TestCaseSupplier.TypedData(pattern, type, "pattern").forceLiteral() + new TestCaseSupplier.TypedData(pattern, DataType.KEYWORD, "pattern").forceLiteral() ), startsWith("AutomataMatchEvaluator[input=Attribute[channel=0], pattern=digraph 
Automaton {\n"), DataType.BOOLEAN, @@ -75,6 +75,10 @@ private static void addCases(List suppliers) { @Override protected Expression build(Source source, List args) { + return buildWildcardLike(source, args); + } + + static Expression buildWildcardLike(Source source, List args) { Expression expression = args.get(0); Literal pattern = (Literal) args.get(1); if (args.size() > 2) { diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/RangeTests.java similarity index 99% rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/RangeTests.java index cd15ed5a94cfc..8094e841636cf 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/RangeTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/RangeTests.java @@ -5,7 +5,7 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate; +package org.elasticsearch.xpack.esql.expression.predicate; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.FoldContext; diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperationTests.java similarity index 96% rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperationTests.java index 05279b74f6382..a67ff584338d6 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/expression/predicate/logical/BinaryLogicOperationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/logical/BinaryLogicOperationTests.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.expression.predicate.logical; +package org.elasticsearch.xpack.esql.expression.predicate.logical; import org.elasticsearch.test.ESTestCase; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index ca47e0cb329b3..f3746db2b38a6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -12,12 +12,12 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.geometry.Point; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.esql.core.util.StringUtils; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index 4e90fe53d96d7..6ca63dfb84f37 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -13,9 +13,9 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.test.TestBlockFactory; 
import org.elasticsearch.geometry.Point; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfoImpl; import org.elasticsearch.xpack.esql.action.EsqlExecutionInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java index 8f846edf2b41c..2cc3fc2251409 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/index/EsIndexSerializationTests.java @@ -27,13 +27,14 @@ import java.util.TreeMap; import java.util.TreeSet; +import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.test.ByteSizeEqualsMatcher.byteSizeEquals; public class EsIndexSerializationTests extends AbstractWireSerializingTestCase { public static EsIndex randomEsIndex() { String name = randomAlphaOfLength(5); Map mapping = randomMapping(); - return new EsIndex(name, mapping, randomConcreteIndices()); + return new EsIndex(name, mapping, randomIndexNameWithModes()); } private static Map randomMapping() { @@ -45,13 +46,8 @@ private static Map randomMapping() { return result; } - private static Map randomConcreteIndices() { - int size = between(0, 10); - Map result = new HashMap<>(size); - while (result.size() < size) { - result.put(randomAlphaOfLength(5), randomFrom(IndexMode.values())); - } - return result; + public static Map randomIndexNameWithModes() { + return randomMap(0, 10, () -> tuple(randomIdentifier(), randomFrom(IndexMode.values()))); } @Override @@ -77,7 +73,10 @@ protected EsIndex mutateInstance(EsIndex instance) throws IOException { switch (between(0, 2)) { case 0 -> name = randomValueOtherThan(name, () -> randomAlphaOfLength(5)); case 1 
-> mapping = randomValueOtherThan(mapping, EsIndexSerializationTests::randomMapping); - case 2 -> indexedNameWithModes = randomValueOtherThan(indexedNameWithModes, EsIndexSerializationTests::randomConcreteIndices); + case 2 -> indexedNameWithModes = randomValueOtherThan( + indexedNameWithModes, + EsIndexSerializationTests::randomIndexNameWithModes + ); default -> throw new IllegalArgumentException(); } return new EsIndex(name, mapping, indexedNameWithModes); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index 11cd123c731e8..310d680cfbf41 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -23,8 +23,6 @@ import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -33,6 +31,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Case; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import 
org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.IndexResolution; @@ -580,6 +580,6 @@ protected List filteredWarnings() { } public static EsRelation relation() { - return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), randomFrom(IndexMode.values()), randomBoolean()); + return new EsRelation(EMPTY, new EsIndex(randomAlphaOfLength(8), emptyMap()), randomFrom(IndexMode.values())); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index a8f8054fbc6b1..95acc84143614 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -14,12 +14,12 @@ import org.elasticsearch.compute.aggregation.QuantileStates; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.LongVectorBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Tuple; import org.elasticsearch.dissect.DissectParser; import org.elasticsearch.index.IndexMode; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -30,19 +30,17 @@ import org.elasticsearch.xpack.esql.core.expression.Alias; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.AttributeSet; +import org.elasticsearch.xpack.esql.core.expression.EntryExpression; import org.elasticsearch.xpack.esql.core.expression.Expression; import 
org.elasticsearch.xpack.esql.core.expression.Expressions; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -67,6 +65,7 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToLong; import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToString; +import org.elasticsearch.xpack.esql.expression.function.scalar.map.LogWithBaseInMap; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Round; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvAvg; import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvCount; @@ -77,6 +76,10 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.multivalue.MvSum; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Concat; +import 
org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -6220,6 +6223,83 @@ public void testLookupJoinPushDownDisabledForDisjunctionBetweenLeftAndRightField var rightRel = as(join.right(), EsRelation.class); } + /** + * When dropping lookup fields, the lookup relation shouldn't include them. + * At least until we can implement InsertFieldExtract there. + * Expects + * EsqlProject[[languages{f}#10]] + * \_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * |_Project[[_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, gender{f}#9, hire_date{f}#14, job{f}#15, job.raw{f}#16, lang + * uages{f}#10, last_name{f}#11, long_noidx{f}#17, salary{f}#12, languages{f}#10 AS language_code]] + * | \_Limit[1000[INTEGER]] + * | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + */ + public void testLookupJoinKeepNoLookupFields() { + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + + String commandDiscardingFields = randomBoolean() ? 
"| KEEP languages" : """ + | DROP _meta_field, emp_no, first_name, gender, language_code, + language_name, last_name, salary, hire_date, job, job.raw, long_noidx + """; + + String query = """ + FROM test + | EVAL language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + """ + commandDiscardingFields; + + var plan = optimizedPlan(query); + + var project = as(plan, Project.class); + assertThat(project.projections().size(), equalTo(1)); + assertThat(project.projections().get(0).name(), equalTo("languages")); + + var join = as(project.child(), Join.class); + var joinRightRelation = as(join.right(), EsRelation.class); + + assertThat(joinRightRelation.output().size(), equalTo(1)); + assertThat(joinRightRelation.output().get(0).name(), equalTo("language_code")); + } + + /** + * Ensure a JOIN shadowed by another JOIN doesn't request the shadowed fields. + * + * Expected + * Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#20]] + * |_Join[LEFT,[language_code{r}#4],[language_code{r}#4],[language_code{f}#18]] + * | |_Eval[[languages{f}#10 AS language_code]] + * | | \_Limit[1000[INTEGER]] + * | | \_EsRelation[test][_meta_field{f}#13, emp_no{f}#7, first_name{f}#8, ge..] 
+ * | \_EsRelation[languages_lookup][LOOKUP][language_code{f}#18] + * \_EsRelation[languages_lookup][LOOKUP][language_code{f}#20, language_name{f}#21] + */ + public void testMultipleLookupShadowing() { + assumeTrue("Requires LOOKUP JOIN", EsqlCapabilities.Cap.JOIN_LOOKUP_V11.isEnabled()); + + String query = """ + FROM test + | EVAL language_code = languages + | LOOKUP JOIN languages_lookup ON language_code + | LOOKUP JOIN languages_lookup ON language_code + """; + + var plan = optimizedPlan(query); + + var finalJoin = as(plan, Join.class); + var finalJoinRightRelation = as(finalJoin.right(), EsRelation.class); + + assertThat(finalJoinRightRelation.output().size(), equalTo(2)); + assertThat(finalJoinRightRelation.output().get(0).name(), equalTo("language_code")); + assertThat(finalJoinRightRelation.output().get(1).name(), equalTo("language_name")); + + var initialJoin = as(finalJoin.left(), Join.class); + var initialJoinRightRelation = as(initialJoin.right(), EsRelation.class); + + assertThat(initialJoinRightRelation.output().size(), equalTo(1)); + assertThat(initialJoinRightRelation.output().get(0).name(), equalTo("language_code")); + } + // // // @@ -6821,4 +6901,34 @@ public void testWhereNull() { var local = as(plan, LocalRelation.class); assertThat(local.supplier(), equalTo(LocalSupplier.EMPTY)); } + + public void testMapExpressionAsFunctionArgument() { + assumeTrue("MapExpression require snapshot build", EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled()); + var query = """ + from test + | EVAL l = log_with_base_in_map(languages, {"base":2.0}) + | KEEP l + """; + var plan = optimizedPlan(query); + Project proj = as(plan, EsqlProject.class); + List fields = proj.projections(); + assertEquals(1, fields.size()); + ReferenceAttribute ra = as(fields.get(0), ReferenceAttribute.class); + assertEquals("l", ra.name()); + assertEquals(DataType.DOUBLE, ra.dataType()); + Eval eval = as(proj.child(), Eval.class); + assertEquals(1, 
eval.fields().size()); + Alias a = as(eval.fields().get(0), Alias.class); + LogWithBaseInMap l = as(a.child(), LogWithBaseInMap.class); + MapExpression me = as(l.base(), MapExpression.class); + assertEquals(1, me.entryExpressions().size()); + EntryExpression ee = as(me.entryExpressions().get(0), EntryExpression.class); + BytesRef key = as(ee.key().fold(FoldContext.small()), BytesRef.class); + assertEquals("base", key.utf8ToString()); + assertEquals(new Literal(EMPTY, 2.0, DataType.DOUBLE), ee.value()); + assertEquals(DataType.DOUBLE, ee.dataType()); + Limit limit = as(eval.child(), Limit.class); + EsRelation esRelation = as(limit.child(), EsRelation.class); + assertEquals(esRelation.indexPattern(), "test"); + } } diff --git a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java similarity index 94% rename from x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java rename to x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java index 91b0564a5b404..e163d082249b4 100644 --- a/x-pack/plugin/esql-core/src/test/java/org/elasticsearch/xpack/esql/core/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/OptimizerRulesTests.java @@ -4,7 +4,7 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. 
*/ -package org.elasticsearch.xpack.esql.core.optimizer; +package org.elasticsearch.xpack.esql.optimizer; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; @@ -13,19 +13,19 @@ import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; import org.elasticsearch.xpack.esql.core.tree.NodeInfo; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.TestUtils; +import org.elasticsearch.xpack.esql.expression.predicate.Range; import java.io.IOException; import java.util.Collections; import java.util.List; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.rangeOf; import static org.elasticsearch.xpack.esql.core.type.DataType.BOOLEAN; import static org.elasticsearch.xpack.esql.core.util.TestUtils.of; -import static org.elasticsearch.xpack.esql.core.util.TestUtils.rangeOf; public class OptimizerRulesTests extends ESTestCase { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 504923f6131f0..1eb7f43ee72ba 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -19,6 +19,7 @@ import org.elasticsearch.compute.aggregation.AggregatorMode; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.Tuple; import 
org.elasticsearch.geometry.Circle; import org.elasticsearch.geometry.Polygon; @@ -40,7 +41,6 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats; import org.elasticsearch.xpack.esql.EsqlTestUtils.TestConfigurableSearchStats.Config; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; import org.elasticsearch.xpack.esql.analysis.Analyzer; @@ -56,9 +56,6 @@ import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.ReferenceAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.tree.Node; import org.elasticsearch.xpack.esql.core.tree.Source; @@ -83,6 +80,9 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToLower; import org.elasticsearch.xpack.esql.expression.function.scalar.string.ToUpper; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.EsqlBinaryComparison; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; @@ -2603,10 +2603,16 @@ public void 
testFieldExtractWithoutSourceAttributes() { | where round(emp_no) > 10 """)); // Transform the verified plan so that it is invalid (i.e. no source attributes) - List emptyAttrList = List.of(); var badPlan = verifiedPlan.transformDown( EsQueryExec.class, - node -> new EsSourceExec(node.source(), node.index(), emptyAttrList, node.query(), IndexMode.STANDARD) + node -> new EsSourceExec( + node.source(), + node.indexPattern(), + IndexMode.STANDARD, + node.indexNameWithModes(), + List.of(), + node.query() + ) ); var e = expectThrows(VerificationException.class, () -> physicalPlanOptimizer.verify(badPlan)); @@ -2728,8 +2734,7 @@ public void testProjectAwayColumns() { new EsField("some_field2", DataType.KEYWORD, Map.of(), true) ) ), - IndexMode.STANDARD, - false + IndexMode.STANDARD ); Attribute some_field1 = relation.output().get(0); Attribute some_field2 = relation.output().get(1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java index c0c145aee5382..643b8368b7ac6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanFunctionEqualsEliminationTests.java @@ -11,9 +11,9 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java index 5b4bf806518de..400cf869f83af 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/BooleanSimplificationTests.java @@ -10,8 +10,9 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.function.scalar.ScalarFunction; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.optimizer.OptimizerRulesTests; import static org.elasticsearch.xpack.esql.EsqlTestUtils.unboundLogicalOptimizerContext; import static org.elasticsearch.xpack.esql.core.expression.Literal.FALSE; @@ -19,8 +20,7 @@ import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; public class BooleanSimplificationTests extends ESTestCase { - private static final Expression DUMMY_EXPRESSION = - new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 0); + private static final Expression DUMMY_EXPRESSION = new OptimizerRulesTests.DummyBooleanExpression(EMPTY, 0); private Expression booleanSimplification(ScalarFunction e) { return new BooleanSimplification().rule(e, unboundLogicalOptimizerContext()); @@ 
-47,10 +47,12 @@ public void testBoolSimplifyAnd() { } public void testBoolCommonFactorExtraction() { - Expression a1 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); - Expression a2 = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); - Expression b = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 2); - Expression c = new org.elasticsearch.xpack.esql.core.optimizer.OptimizerRulesTests.DummyBooleanExpression(EMPTY, 3); + BooleanSimplification simplification = new BooleanSimplification(); + + Expression a1 = new OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); + Expression a2 = new OptimizerRulesTests.DummyBooleanExpression(EMPTY, 1); + Expression b = new OptimizerRulesTests.DummyBooleanExpression(EMPTY, 2); + Expression c = new OptimizerRulesTests.DummyBooleanExpression(EMPTY, 3); Or actual = new Or(EMPTY, new And(EMPTY, a1, b), new And(EMPTY, a2, c)); And expected = new And(EMPTY, a1, new Or(EMPTY, b, c)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java index a0d23731ae82d..8c785f5e60fd4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineBinaryComparisonsTests.java @@ -11,9 +11,9 @@ import org.elasticsearch.xpack.esql.EsqlTestUtils; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; 
-import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java index bb5f2fd3505e9..23cf6c2594365 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/CombineDisjunctionsTests.java @@ -11,12 +11,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.util.CollectionUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.ip.CIDRMatch; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.In; import org.elasticsearch.xpack.esql.plan.logical.Filter; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java index 8a8585b8d0ab5..864a59338c6a9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ConstantFoldingTests.java @@ -15,15 +15,15 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.Nullability; import org.elasticsearch.xpack.esql.core.expression.predicate.BinaryOperator; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.Range; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; 
import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java index 252b25a214bb8..254b9197204a0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/FoldNullTests.java @@ -11,10 +11,6 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; @@ -53,6 +49,10 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.string.LTrim; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import 
org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java index a6c0d838b2c21..128e1162c0bed 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateEqualsTests.java @@ -11,12 +11,12 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.Range; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.BinaryLogic; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.Range; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.BinaryLogic; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java index d35890e5b56bb..479b5072864f0 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PropagateNullableTests.java @@ -11,13 +11,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.plan.logical.Filter; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java index bc22fbb6bd828..3e0ae4f97e405 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/PushDownAndCombineFiltersTests.java @@ -13,16 +13,15 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.Predicates; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; import org.elasticsearch.xpack.esql.expression.function.scalar.math.Pow; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.Predicates; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThan; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.GreaterThanOrEqual; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.LessThan; -import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.plan.logical.Aggregate; import org.elasticsearch.xpack.esql.plan.logical.EsRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; @@ -33,9 +32,9 @@ import java.util.ArrayList; import java.util.List; +import java.util.Map; import static java.util.Collections.emptyList; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonList; import static 
org.elasticsearch.xpack.esql.EsqlTestUtils.FOUR; import static org.elasticsearch.xpack.esql.EsqlTestUtils.ONE; @@ -251,12 +250,6 @@ private static EsRelation relation() { } private static EsRelation relation(List fieldAttributes) { - return new EsRelation( - EMPTY, - new EsIndex(randomAlphaOfLength(8), emptyMap()), - fieldAttributes, - randomFrom(IndexMode.values()), - randomBoolean() - ); + return new EsRelation(EMPTY, randomIdentifier(), randomFrom(IndexMode.values()), Map.of(), fieldAttributes); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java index b9ffc39e5e130..449728f43a3a2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/logical/ReplaceRegexMatchTests.java @@ -11,13 +11,13 @@ import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; -import org.elasticsearch.xpack.esql.core.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RLikePattern; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.esql.core.expression.predicate.regex.WildcardPattern; import org.elasticsearch.xpack.esql.core.util.StringUtils; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.esql.expression.predicate.operator.comparison.Equals; import static java.util.Arrays.asList; 
diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java index 90c8ae1032325..b7eadc243d977 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/rules/physical/local/PushTopNToSourceTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.xpack.esql.expression.Order; import org.elasticsearch.xpack.esql.expression.function.scalar.spatial.StDistance; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; -import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.optimizer.LocalPhysicalOptimizerContext; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.EvalExec; @@ -582,9 +581,8 @@ public TestPhysicalPlanBuilder limit(int limit) { } public TopNExec build() { - EsIndex esIndex = new EsIndex(this.index, Map.of()); List attributes = new ArrayList<>(fields.values()); - PhysicalPlan child = new EsQueryExec(Source.EMPTY, esIndex, indexMode, attributes, null, null, List.of(), 0); + PhysicalPlan child = new EsQueryExec(Source.EMPTY, this.index, indexMode, Map.of(), attributes, null, null, List.of(), 0); if (aliases.isEmpty() == false) { child = new EvalExec(Source.EMPTY, child, aliases); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java index 99a04b6ed8f10..31ea4f2712b98 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/AbstractStatementParserTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.esql.VerificationException; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; +import org.elasticsearch.xpack.esql.core.expression.MapExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; @@ -24,6 +25,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.List; +import java.util.Map; import static org.elasticsearch.xpack.esql.core.tree.Source.EMPTY; import static org.elasticsearch.xpack.esql.core.util.NumericUtils.asLongUnsigned; @@ -125,6 +127,18 @@ static Literal literalStrings(String... strings) { return new Literal(EMPTY, Arrays.asList(strings), DataType.KEYWORD); } + static MapExpression mapExpression(Map keyValuePairs) { + List ees = new ArrayList<>(keyValuePairs.size()); + for (Map.Entry entry : keyValuePairs.entrySet()) { + String key = entry.getKey(); + Object value = entry.getValue(); + DataType type = (value instanceof List l) ? 
DataType.fromJava(l.get(0)) : DataType.fromJava(value); + ees.add(new Literal(EMPTY, key, DataType.KEYWORD)); + ees.add(new Literal(EMPTY, value, type)); + } + return new MapExpression(EMPTY, ees); + } + void expectError(String query, String errorMessage) { ParsingException e = expectThrows(ParsingException.class, "Expected syntax error for " + query, () -> statement(query)); assertThat(e.getMessage(), containsString(errorMessage)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java index 85d4017b166fa..6591f498e85aa 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/ExpressionTests.java @@ -14,12 +14,12 @@ import org.elasticsearch.xpack.esql.core.expression.Literal; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; import org.elasticsearch.xpack.esql.core.expression.UnresolvedStar; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.UnresolvedNamePattern; import org.elasticsearch.xpack.esql.expression.function.UnresolvedFunction; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mul; diff --git 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index a6243d25ba579..3b7ae5adcd8b2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.parser; import org.elasticsearch.Build; +import org.elasticsearch.common.logging.LoggerMessageFormat; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexMode; import org.elasticsearch.xpack.esql.action.EsqlCapabilities; @@ -21,8 +22,6 @@ import org.elasticsearch.xpack.esql.core.expression.MetadataAttribute; import org.elasticsearch.xpack.esql.core.expression.NamedExpression; import org.elasticsearch.xpack.esql.core.expression.UnresolvedAttribute; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.expression.Order; @@ -33,6 +32,8 @@ import org.elasticsearch.xpack.esql.expression.function.scalar.convert.ToInteger; import org.elasticsearch.xpack.esql.expression.function.scalar.string.RLike; import org.elasticsearch.xpack.esql.expression.function.scalar.string.WildcardLike; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; @@ -64,6 +65,7 @@ import 
java.util.ArrayList; import java.util.HashMap; +import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -2358,4 +2360,583 @@ public void testFailingMetadataWithSquareBrackets() { "line 1:11: mismatched input '[' expecting {, '|', ',', 'metadata'}" ); } + + public void testNamedFunctionArgumentInMap() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + // functions can be scalar, grouping and aggregation + // functions can be in eval/where/stats/sort/dissect/grok commands, commands in snapshot are not covered + // positive + // In eval and where clause as function arguments + LinkedHashMap expectedMap1 = new LinkedHashMap<>(4); + expectedMap1.put("option1", "string"); + expectedMap1.put("option2", 1); + expectedMap1.put("option3", List.of(2.0, 3.0, 4.0)); + expectedMap1.put("option4", List.of(true, false)); + LinkedHashMap expectedMap2 = new LinkedHashMap<>(4); + expectedMap2.put("option1", List.of("string1", "string2")); + expectedMap2.put("option2", List.of(1, 2, 3)); + expectedMap2.put("option3", 2.0); + expectedMap2.put("option4", true); + LinkedHashMap expectedMap3 = new LinkedHashMap<>(4); + expectedMap3.put("option1", "string"); + expectedMap3.put("option2", 2.0); + expectedMap3.put("option3", List.of(1, 2, 3)); + expectedMap3.put("option4", List.of(true, false)); + + assertEquals( + new Filter( + EMPTY, + new Eval( + EMPTY, + relation("test"), + List.of( + new Alias( + EMPTY, + "x", + function( + "fn1", + List.of(attribute("f1"), new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap1)) + ) + ) + ) + ), + new Equals( + EMPTY, + attribute("y"), + function("fn2", List.of(new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap2))) + ) + ), + statement(""" + from test + | eval x = fn1(f1, "testString", 
{"option1":"string","option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]}) + | where y == fn2("testString", {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true}) + """) + ); + + // In stats, by and sort as function arguments + assertEquals( + new OrderBy( + EMPTY, + new Aggregate( + EMPTY, + relation("test"), + Aggregate.AggregateType.STANDARD, + List.of( + new Alias( + EMPTY, + "fn2(f3, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":2.0,\"option4\":true})", + function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))) + ) + ), + List.of( + new Alias(EMPTY, "x", function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1)))), + attribute("fn2(f3, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":2.0,\"option4\":true})") + ) + ), + List.of( + new Order( + EMPTY, + function("fn3", List.of(attribute("f4"), mapExpression(expectedMap3))), + Order.OrderDirection.ASC, + Order.NullsPosition.LAST + ) + ) + ), + statement(""" + from test + | stats x = fn1(f1, f2, {"option1":"string","option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]}) + by fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true}) + | sort fn3(f4, {"option1":"string","option2":2.0,"option3":[1,2,3],"option4":[true,false]}) + """) + ); + + // In dissect and grok as function arguments + LogicalPlan plan = statement(""" + from test + | dissect fn1(f1, f2, {"option1":"string", "option2":1,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}" + | grok fn2(f3, {"option1":["string1","string2"],"option2":[1,2,3],"option3":2.0,"option4":true}) "%{WORD:foo}" + """); + Grok grok = as(plan, Grok.class); + assertEquals(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input()); + assertEquals("%{WORD:foo}", grok.parser().pattern()); + assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); + Dissect dissect = 
as(grok.child(), Dissect.class); + assertEquals(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input()); + assertEquals("%{bar}", dissect.parser().pattern()); + assertEquals("", dissect.parser().appendSeparator()); + assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); + UnresolvedRelation ur = as(dissect.child(), UnresolvedRelation.class); + assertEquals(ur, relation("test")); + + // map entry values provided in named parameter, arrays are not supported by named parameters yet + assertEquals( + new Filter( + EMPTY, + new Eval( + EMPTY, + relation("test"), + List.of( + new Alias( + EMPTY, + "x", + function( + "fn1", + List.of(attribute("f1"), new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap1)) + ) + ) + ) + ), + new Equals( + EMPTY, + attribute("y"), + function("fn2", List.of(new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap2))) + ) + ), + statement( + """ + from test + | eval x = ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) + | where y == ?fn2(?n2, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6}) + """, + new QueryParams( + List.of( + paramAsIdentifier("fn1", "fn1"), + paramAsIdentifier("fn2", "fn2"), + paramAsIdentifier("n1", "f1"), + paramAsConstant("n2", "testString"), + paramAsConstant("n3", "string"), + paramAsConstant("n4", 1), + paramAsConstant("n5", 2.0), + paramAsConstant("n6", true) + ) + ) + ) + ); + + assertEquals( + new OrderBy( + EMPTY, + new Aggregate( + EMPTY, + relation("test"), + Aggregate.AggregateType.STANDARD, + List.of( + new Alias( + EMPTY, + "?fn2(?n7, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":?n5,\"option4\":?n6})", + function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))) + ) + ), + List.of( + new Alias(EMPTY, "x", function("fn1", List.of(attribute("f1"), attribute("f2"), 
mapExpression(expectedMap1)))), + attribute("?fn2(?n7, {\"option1\":[\"string1\",\"string2\"],\"option2\":[1,2,3],\"option3\":?n5,\"option4\":?n6})") + ) + ), + List.of( + new Order( + EMPTY, + function("fn3", List.of(attribute("f4"), mapExpression(expectedMap3))), + Order.OrderDirection.ASC, + Order.NullsPosition.LAST + ) + ) + ), + statement( + """ + from test + | stats x = ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) + by ?fn2(?n7, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6}) + | sort ?fn3(?n8, {"option1":?n3,"option2":?n5,"option3":[1,2,3],"option4":[true,false]}) + """, + new QueryParams( + List.of( + paramAsIdentifier("fn1", "fn1"), + paramAsIdentifier("fn2", "fn2"), + paramAsIdentifier("fn3", "fn3"), + paramAsIdentifier("n1", "f1"), + paramAsIdentifier("n2", "f2"), + paramAsConstant("n3", "string"), + paramAsConstant("n4", 1), + paramAsConstant("n5", 2.0), + paramAsConstant("n6", true), + paramAsIdentifier("n7", "f3"), + paramAsIdentifier("n8", "f4") + ) + ) + ) + ); + + plan = statement( + """ + from test + | dissect ?fn1(?n1, ?n2, {"option1":?n3,"option2":?n4,"option3":[2.0,3.0,4.0],"option4":[true,false]}) "%{bar}" + | grok ?fn2(?n7, {"option1":["string1","string2"],"option2":[1,2,3],"option3":?n5,"option4":?n6}) "%{WORD:foo}" + """, + new QueryParams( + List.of( + paramAsIdentifier("fn1", "fn1"), + paramAsIdentifier("fn2", "fn2"), + paramAsIdentifier("n1", "f1"), + paramAsIdentifier("n2", "f2"), + paramAsConstant("n3", "string"), + paramAsConstant("n4", 1), + paramAsConstant("n5", 2.0), + paramAsConstant("n6", true), + paramAsIdentifier("n7", "f3") + ) + ) + ); + grok = as(plan, Grok.class); + assertEquals(function("fn2", List.of(attribute("f3"), mapExpression(expectedMap2))), grok.input()); + assertEquals("%{WORD:foo}", grok.parser().pattern()); + assertEquals(List.of(referenceAttribute("foo", KEYWORD)), grok.extractedFields()); + dissect = as(grok.child(), 
Dissect.class); + assertEquals(function("fn1", List.of(attribute("f1"), attribute("f2"), mapExpression(expectedMap1))), dissect.input()); + assertEquals("%{bar}", dissect.parser().pattern()); + assertEquals("", dissect.parser().appendSeparator()); + assertEquals(List.of(referenceAttribute("bar", KEYWORD)), dissect.extractedFields()); + ur = as(dissect.child(), UnresolvedRelation.class); + assertEquals(ur, relation("test")); + } + + public void testNamedFunctionArgumentWithCaseSensitiveKeys() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + LinkedHashMap expectedMap1 = new LinkedHashMap<>(3); + expectedMap1.put("option", "string"); + expectedMap1.put("Option", 1); + expectedMap1.put("oPtion", List.of(2.0, 3.0, 4.0)); + LinkedHashMap expectedMap2 = new LinkedHashMap<>(3); + expectedMap2.put("option", List.of("string1", "string2")); + expectedMap2.put("Option", List.of(1, 2, 3)); + expectedMap2.put("oPtion", 2.0); + + assertEquals( + new Filter( + EMPTY, + new Eval( + EMPTY, + relation("test"), + List.of( + new Alias( + EMPTY, + "x", + function( + "fn1", + List.of(attribute("f1"), new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap1)) + ) + ) + ) + ), + new Equals( + EMPTY, + attribute("y"), + function("fn2", List.of(new Literal(EMPTY, "testString", KEYWORD), mapExpression(expectedMap2))) + ) + ), + statement(""" + from test + | eval x = fn1(f1, "testString", {"option":"string","Option":1,"oPtion":[2.0,3.0,4.0]}) + | where y == fn2("testString", {"option":["string1","string2"],"Option":[1,2,3],"oPtion":2.0}) + """) + ); + } + + public void testMultipleNamedFunctionArgumentsNotAllowed() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "41"), + Map.entry("where {}", "38"), + Map.entry("stats 
{}", "38"), + Map.entry("stats agg() by {}", "47"), + Map.entry("sort {}", "37"), + Map.entry("dissect {} \"%{bar}\"", "40"), + Map.entry("grok {} \"%{WORD:foo}\"", "37") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok") + ? "mismatched input ',' expecting ')'" + : "no viable alternative at input 'fn(f1,"; + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option\":1}, {\"option\":2})"), + LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage) + ); + } + } + + public void testNamedFunctionArgumentNotInMap() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "38"), + Map.entry("where {}", "35"), + Map.entry("stats {}", "35"), + Map.entry("stats agg() by {}", "44"), + Map.entry("sort {}", "34"), + Map.entry("dissect {} \"%{bar}\"", "37"), + Map.entry("grok {} \"%{WORD:foo}\"", "34") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok") + ? 
"extraneous input ':' expecting {',', ')'}" + : "no viable alternative at input 'fn(f1, \"option1\":'"; + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, \"option1\":\"string\")"), + LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage) + ); + } + } + + public void testNamedFunctionArgumentNotConstant() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", new String[] { "31", "35" }), + Map.entry("where {}", new String[] { "28", "32" }), + Map.entry("stats {}", new String[] { "28", "32" }), + Map.entry("stats agg() by {}", new String[] { "37", "41" }), + Map.entry("sort {}", new String[] { "27", "31" }), + Map.entry("dissect {} \"%{bar}\"", new String[] { "30", "34" }), + Map.entry("grok {} \"%{WORD:foo}\"", new String[] { "27", "31" }) + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error1 = command.getValue()[0]; + String error2 = command.getValue()[1]; + String errorMessage1 = cmd.startsWith("dissect") || cmd.startsWith("grok") + ? "mismatched input '1' expecting QUOTED_STRING" + : "no viable alternative at input 'fn(f1, { 1'"; + String errorMessage2 = cmd.startsWith("dissect") || cmd.startsWith("grok") + ? 
"mismatched input 'string' expecting {QUOTED_STRING" + : "no viable alternative at input 'fn(f1, {"; + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, { 1:\"string\" })"), + LoggerMessageFormat.format(null, "line 1:{}: {}", error1, errorMessage1) + ); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, { \"1\":string })"), + LoggerMessageFormat.format(null, "line 1:{}: {}", error2, errorMessage2) + ); + } + } + + public void testNamedFunctionArgumentEmptyMap() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "30"), + Map.entry("where {}", "27"), + Map.entry("stats {}", "27"), + Map.entry("stats agg() by {}", "36"), + Map.entry("sort {}", "26"), + Map.entry("dissect {} \"%{bar}\"", "29"), + Map.entry("grok {} \"%{WORD:foo}\"", "26") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + String errorMessage = cmd.startsWith("dissect") || cmd.startsWith("grok") + ? 
"mismatched input '}' expecting QUOTED_STRING" + : "no viable alternative at input 'fn(f1, {}'"; + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {}})"), + LoggerMessageFormat.format(null, "line 1:{}: {}", error, errorMessage) + ); + } + } + + public void testNamedFunctionArgumentMapWithNULL() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "29"), + Map.entry("where {}", "26"), + Map.entry("stats {}", "26"), + Map.entry("stats agg() by {}", "35"), + Map.entry("sort {}", "25"), + Map.entry("dissect {} \"%{bar}\"", "28"), + Map.entry("grok {} \"%{WORD:foo}\"", "25") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option\":null})"), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Invalid named function argument [\"option\":null], NULL is not supported" + ) + ); + } + } + + public void testNamedFunctionArgumentMapWithEmptyKey() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "29"), + Map.entry("where {}", "26"), + Map.entry("stats {}", "26"), + Map.entry("stats agg() by {}", "35"), + Map.entry("sort {}", "25"), + Map.entry("dissect {} \"%{bar}\"", "28"), + Map.entry("grok {} \"%{WORD:foo}\"", "25") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"\":1})"), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Invalid named function argument [\"\":1], empty key is not 
supported" + ) + ); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\" \":1})"), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Invalid named function argument [\" \":1], empty key is not supported" + ) + ); + } + } + + public void testNamedFunctionArgumentMapWithDuplicatedKey() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "29"), + Map.entry("where {}", "26"), + Map.entry("stats {}", "26"), + Map.entry("stats agg() by {}", "35"), + Map.entry("sort {}", "25"), + Map.entry("dissect {} \"%{bar}\"", "28"), + Map.entry("grok {} \"%{WORD:foo}\"", "25") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"dup\":1,\"dup\":2})"), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Duplicated function arguments with the same name [dup] is not supported" + ) + ); + } + } + + public void testNamedFunctionArgumentInInvalidPositions() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + // negative, named arguments are not supported outside of a functionExpression where booleanExpression or indexPattern is supported + String map = "{\"option1\":\"string\", \"option2\":1}"; + + Map commands = Map.ofEntries( + Map.entry("from {}", "line 1:7: mismatched input '\"option1\"' expecting {, '|', ',', 'metadata'}"), + Map.entry("row x = {}", "line 1:9: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"), + Map.entry("eval x = {}", "line 1:22: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"), + Map.entry("where x > {}", "line 1:23: no viable alternative at input 'x > {'"), + 
Map.entry("stats agg() by {}", "line 1:28: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"), + Map.entry("sort {}", "line 1:18: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"), + Map.entry("keep {}", "line 1:18: token recognition error at: '{'"), + Map.entry("drop {}", "line 1:18: token recognition error at: '{'"), + Map.entry("rename a as {}", "line 1:25: token recognition error at: '{'"), + Map.entry("mv_expand {}", "line 1:23: token recognition error at: '{'"), + Map.entry("limit {}", "line 1:19: mismatched input '{' expecting INTEGER_LITERAL"), + Map.entry("enrich idx2 on f1 with f2 = {}", "line 1:41: token recognition error at: '{'"), + Map.entry("dissect {} \"%{bar}\"", "line 1:21: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL"), + Map.entry("grok {} \"%{WORD:foo}\"", "line 1:18: extraneous input '{' expecting {QUOTED_STRING, INTEGER_LITERAL") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String errorMessage = command.getValue(); + String from = cmd.startsWith("row") || cmd.startsWith("from") ? 
"" : "from test | "; + expectError(LoggerMessageFormat.format(null, from + cmd, map), errorMessage); + } + } + + public void testNamedFunctionArgumentWithUnsupportedNamedParameterTypes() { + assumeTrue( + "named function arguments require snapshot build", + EsqlCapabilities.Cap.OPTIONAL_NAMED_ARGUMENT_MAP_FOR_FUNCTION.isEnabled() + ); + Map commands = Map.ofEntries( + Map.entry("eval x = {}", "29"), + Map.entry("where {}", "26"), + Map.entry("stats {}", "26"), + Map.entry("stats agg() by {}", "35"), + Map.entry("sort {}", "25"), + Map.entry("dissect {} \"%{bar}\"", "28"), + Map.entry("grok {} \"%{WORD:foo}\"", "25") + ); + + for (Map.Entry command : commands.entrySet()) { + String cmd = command.getKey(); + String error = command.getValue(); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option1\":?n1})"), + List.of(paramAsIdentifier("n1", "v1")), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Invalid named function argument [\"option1\":?n1], only constant value is supported" + ) + ); + expectError( + LoggerMessageFormat.format(null, "from test | " + cmd, "fn(f1, {\"option1\":?n1})"), + List.of(paramAsPattern("n1", "v1")), + LoggerMessageFormat.format( + null, + "line 1:{}: {}", + error, + "Invalid named function argument [\"option1\":?n1], only constant value is supported" + ) + ); + } + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/EsRelationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/EsRelationSerializationTests.java index fa3038b24e8eb..18cd9716480e6 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/EsRelationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/EsRelationSerializationTests.java @@ -8,22 +8,25 @@ package org.elasticsearch.xpack.esql.plan.logical; import org.elasticsearch.index.IndexMode; +import 
org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.EsIndexSerializationTests; import java.io.IOException; import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.index.EsIndexSerializationTests.randomIndexNameWithModes; public class EsRelationSerializationTests extends AbstractLogicalPlanSerializationTests { public static EsRelation randomEsRelation() { - Source source = randomSource(); - EsIndex index = EsIndexSerializationTests.randomEsIndex(); - List attributes = randomFieldAttributes(0, 10, false); - IndexMode indexMode = randomFrom(IndexMode.values()); - boolean frozen = randomBoolean(); - return new EsRelation(source, index, attributes, indexMode, frozen); + return new EsRelation( + randomSource(), + randomIdentifier(), + randomFrom(IndexMode.values()), + randomIndexNameWithModes(), + randomFieldAttributes(0, 10, false) + ); } @Override @@ -33,18 +36,18 @@ protected EsRelation createTestInstance() { @Override protected EsRelation mutateInstance(EsRelation instance) throws IOException { - EsIndex index = instance.index(); - List attributes = instance.output(); + String indexPattern = instance.indexPattern(); IndexMode indexMode = instance.indexMode(); - boolean frozen = instance.frozen(); + Map indexNameWithModes = instance.indexNameWithModes(); + List attributes = instance.output(); switch (between(0, 3)) { - case 0 -> index = randomValueOtherThan(index, EsIndexSerializationTests::randomEsIndex); - case 1 -> attributes = randomValueOtherThan(attributes, () -> randomFieldAttributes(0, 10, false)); - case 2 -> indexMode = randomValueOtherThan(indexMode, () -> randomFrom(IndexMode.values())); - case 3 -> frozen = false == frozen; + case 0 -> indexPattern = randomValueOtherThan(indexPattern, ESTestCase::randomIdentifier); + case 1 -> 
indexMode = randomValueOtherThan(indexMode, () -> randomFrom(IndexMode.values())); + case 2 -> indexNameWithModes = randomValueOtherThan(indexNameWithModes, EsIndexSerializationTests::randomIndexNameWithModes); + case 3 -> attributes = randomValueOtherThan(attributes, () -> randomFieldAttributes(0, 10, false)); default -> throw new IllegalArgumentException(); } - return new EsRelation(instance.source(), index, attributes, indexMode, frozen); + return new EsRelation(instance.source(), indexPattern, indexMode, indexNameWithModes, attributes); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java index 647e7c358eddb..cf59a35799ad1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/logical/local/LocalRelationSerializationTests.java @@ -9,7 +9,7 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockUtils; -import org.elasticsearch.xpack.esql.TestBlockFactory; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.plan.logical.AbstractLogicalPlanSerializationTests; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExecSerializationTests.java index 6104069769085..eb53a57d3bdfb 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExecSerializationTests.java +++ 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsQueryExecSerializationTests.java @@ -14,24 +14,28 @@ import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; -import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.EsIndexSerializationTests; import java.io.IOException; import java.util.List; +import java.util.Map; + +import static org.elasticsearch.xpack.esql.index.EsIndexSerializationTests.randomIndexNameWithModes; public class EsQueryExecSerializationTests extends AbstractPhysicalPlanSerializationTests { public static EsQueryExec randomEsQueryExec() { - Source source = randomSource(); - EsIndex index = EsIndexSerializationTests.randomEsIndex(); - IndexMode indexMode = randomFrom(IndexMode.values()); - List attrs = randomFieldAttributes(1, 10, false); - QueryBuilder query = randomQuery(); - Expression limit = new Literal(randomSource(), between(0, Integer.MAX_VALUE), DataType.INTEGER); - Integer estimatedRowSize = randomEstimatedRowSize(); - return new EsQueryExec(source, index, indexMode, attrs, query, limit, EsQueryExec.NO_SORTS, estimatedRowSize); + return new EsQueryExec( + randomSource(), + randomIdentifier(), + randomFrom(IndexMode.values()), + randomIndexNameWithModes(), + randomFieldAttributes(1, 10, false), + randomQuery(), + new Literal(randomSource(), between(0, Integer.MAX_VALUE), DataType.INTEGER), + EsQueryExec.NO_SORTS, + randomEstimatedRowSize() + ); } public static QueryBuilder randomQuery() { @@ -45,27 +49,39 @@ protected EsQueryExec createTestInstance() { @Override protected EsQueryExec mutateInstance(EsQueryExec instance) throws IOException { - EsIndex index = instance.index(); + String indexPattern = instance.indexPattern(); IndexMode 
indexMode = instance.indexMode(); + Map indexNameWithModes = instance.indexNameWithModes(); List attrs = instance.attrs(); QueryBuilder query = instance.query(); Expression limit = instance.limit(); Integer estimatedRowSize = instance.estimatedRowSize(); - switch (between(0, 5)) { - case 0 -> index = randomValueOtherThan(index, EsIndexSerializationTests::randomEsIndex); + switch (between(0, 6)) { + case 0 -> indexPattern = randomValueOtherThan(indexPattern, EsIndexSerializationTests::randomIdentifier); case 1 -> indexMode = randomValueOtherThan(indexMode, () -> randomFrom(IndexMode.values())); - case 2 -> attrs = randomValueOtherThan(attrs, () -> randomFieldAttributes(1, 10, false)); - case 3 -> query = randomValueOtherThan(query, EsQueryExecSerializationTests::randomQuery); - case 4 -> limit = randomValueOtherThan( + case 2 -> indexNameWithModes = randomValueOtherThan(indexNameWithModes, EsIndexSerializationTests::randomIndexNameWithModes); + case 3 -> attrs = randomValueOtherThan(attrs, () -> randomFieldAttributes(1, 10, false)); + case 4 -> query = randomValueOtherThan(query, EsQueryExecSerializationTests::randomQuery); + case 5 -> limit = randomValueOtherThan( limit, () -> new Literal(randomSource(), between(0, Integer.MAX_VALUE), DataType.INTEGER) ); - case 5 -> estimatedRowSize = randomValueOtherThan( + case 6 -> estimatedRowSize = randomValueOtherThan( estimatedRowSize, AbstractPhysicalPlanSerializationTests::randomEstimatedRowSize ); } - return new EsQueryExec(instance.source(), index, indexMode, attrs, query, limit, EsQueryExec.NO_SORTS, estimatedRowSize); + return new EsQueryExec( + instance.source(), + indexPattern, + indexMode, + indexNameWithModes, + attrs, + query, + limit, + EsQueryExec.NO_SORTS, + estimatedRowSize + ); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExecSerializationTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExecSerializationTests.java index 253127cc7ee95..a072a2e23a506 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/EsSourceExecSerializationTests.java @@ -10,24 +10,26 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermQueryBuilder; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.core.expression.Attribute; -import org.elasticsearch.xpack.esql.core.tree.Source; -import org.elasticsearch.xpack.esql.index.EsIndex; import org.elasticsearch.xpack.esql.index.EsIndexSerializationTests; import java.io.IOException; import java.util.List; +import java.util.Map; -import static org.elasticsearch.xpack.esql.plan.logical.AbstractLogicalPlanSerializationTests.randomFieldAttributes; +import static org.elasticsearch.xpack.esql.index.EsIndexSerializationTests.randomIndexNameWithModes; public class EsSourceExecSerializationTests extends AbstractPhysicalPlanSerializationTests { public static EsSourceExec randomEsSourceExec() { - Source source = randomSource(); - EsIndex index = EsIndexSerializationTests.randomEsIndex(); - List attributes = randomFieldAttributes(1, 10, false); - QueryBuilder query = new TermQueryBuilder(randomAlphaOfLength(5), randomAlphaOfLength(5)); - IndexMode indexMode = randomFrom(IndexMode.values()); - return new EsSourceExec(source, index, attributes, query, indexMode); + return new EsSourceExec( + randomSource(), + randomIdentifier(), + randomFrom(IndexMode.values()), + randomIndexNameWithModes(), + randomFieldAttributes(1, 10, false), + new TermQueryBuilder(randomAlphaOfLength(5), randomAlphaOfLength(5)) + ); } @Override @@ -37,18 +39,20 @@ protected EsSourceExec createTestInstance() { @Override protected 
EsSourceExec mutateInstance(EsSourceExec instance) throws IOException { - EsIndex index = instance.index(); + String indexPattern = instance.indexPattern(); + IndexMode indexMode = instance.indexMode(); + Map indexNameWithModes = instance.indexNameWithModes(); List attributes = instance.output(); QueryBuilder query = instance.query(); - IndexMode indexMode = instance.indexMode(); - switch (between(0, 3)) { - case 0 -> index = randomValueOtherThan(index, EsIndexSerializationTests::randomEsIndex); - case 1 -> attributes = randomValueOtherThan(attributes, () -> randomFieldAttributes(1, 10, false)); - case 2 -> query = randomValueOtherThan(query, () -> new TermQueryBuilder(randomAlphaOfLength(5), randomAlphaOfLength(5))); - case 3 -> indexMode = randomValueOtherThan(indexMode, () -> randomFrom(IndexMode.values())); + switch (between(0, 4)) { + case 0 -> indexPattern = randomValueOtherThan(indexPattern, ESTestCase::randomIdentifier); + case 1 -> indexMode = randomValueOtherThan(indexMode, () -> randomFrom(IndexMode.values())); + case 2 -> indexNameWithModes = randomValueOtherThan(indexNameWithModes, EsIndexSerializationTests::randomIndexNameWithModes); + case 3 -> attributes = randomValueOtherThan(attributes, () -> randomFieldAttributes(1, 10, false)); + case 4 -> query = randomValueOtherThan(query, () -> new TermQueryBuilder(randomAlphaOfLength(5), randomAlphaOfLength(5))); default -> throw new IllegalStateException(); } - return new EsSourceExec(instance.source(), index, attributes, query, indexMode); + return new EsSourceExec(instance.source(), indexPattern, indexMode, indexNameWithModes, attributes, query); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java index f8e12cd4f5ba9..1e930e1da82e8 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/plan/physical/ExchangeSinkExecSerializationTests.java @@ -25,7 +25,11 @@ import java.io.IOException; import java.util.List; +import java.util.Map; +import java.util.function.Function; +import java.util.stream.IntStream; +import static java.util.stream.Collectors.toMap; import static org.elasticsearch.test.ByteSizeEqualsMatcher.byteSizeEquals; import static org.hamcrest.Matchers.equalTo; @@ -66,14 +70,15 @@ protected boolean alwaysEmptySource() { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. */ public void testManyTypeConflicts() throws IOException { - testManyTypeConflicts(false, ByteSizeValue.ofBytes(1424046L)); /* * History: * 2.3mb - shorten error messages for UnsupportedAttributes #111973 * 1.8mb - cache EsFields #112008 * 1.4mb - string serialization #112929 * 1424046b - remove node-level plan #117422 + * 1040607b - remove EsIndex mapping serialization #119580 */ + testManyTypeConflicts(false, ByteSizeValue.ofBytes(1040607)); } /** @@ -81,7 +86,6 @@ public void testManyTypeConflicts() throws IOException { * See {@link #testManyTypeConflicts(boolean, ByteSizeValue)} for more. 
*/ public void testManyTypeConflictsWithParent() throws IOException { - testManyTypeConflicts(true, ByteSizeValue.ofBytes(2774190)); /* * History: * 2 gb+ - start @@ -91,7 +95,9 @@ public void testManyTypeConflictsWithParent() throws IOException { * 2774214b - string serialization #112929 * 2774192b - remove field attribute #112881 * 2774190b - remove node-level plan #117422 + * 2007288b - remove EsIndex mapping serialization #119580 */ + testManyTypeConflicts(true, ByteSizeValue.ofBytes(2007288)); } private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) throws IOException { @@ -105,19 +111,19 @@ private void testManyTypeConflicts(boolean withParent, ByteSizeValue expected) t * with a single root field that has many children, grandchildren etc. */ public void testDeeplyNestedFields() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(47252409); /* * History: * 48223371b - string serialization #112929 * 47252411b - remove field attribute #112881 - * 47252409b - remove node-level plan + * 47252409b - remove node-level plan #117422 + * 43927169b - remove EsIndex mapping serialization #119580 */ int depth = 6; int childrenPerLevel = 8; EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); - testSerializePlanWithIndex(index, expected); + testSerializePlanWithIndex(index, ByteSizeValue.ofBytes(43927169)); } /** @@ -126,19 +132,39 @@ public void testDeeplyNestedFields() throws IOException { * with a single root field that has many children, grandchildren etc. 
*/ public void testDeeplyNestedFieldsKeepOnlyOne() throws IOException { - ByteSizeValue expected = ByteSizeValue.ofBytes(9425804); /* * History: * 9426058b - string serialization #112929 * 9425806b - remove field attribute #112881 * 9425804b - remove node-level plan #117422 + * 352b - remove EsIndex mapping serialization #119580 */ int depth = 6; int childrenPerLevel = 9; EsIndex index = EsIndexSerializationTests.deeplyNestedIndex(depth, childrenPerLevel); - testSerializePlanWithIndex(index, expected, false); + testSerializePlanWithIndex(index, ByteSizeValue.ofBytes(352), false); + } + + /** + * Test the size of serializing a plan like + * FROM index* | LIMIT 10 | KEEP one_single_field + * with an index pattern pointing to a hundred actual indices with rather long names + */ + public void testIndexPatternTargetingMultipleIndices() throws IOException { + /* + * History: 4996b - initial + */ + + var index = new EsIndex( + "index*", + Map.of(), + IntStream.range(0, 100) + .mapToObj(i -> "partial-.ds-index-service-logs-2025.01.01-000" + i) + .collect(toMap(Function.identity(), i -> IndexMode.STANDARD)) + ); + testSerializePlanWithIndex(index, ByteSizeValue.ofBytes(4996)); } /** @@ -165,8 +191,8 @@ private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected) t private void testSerializePlanWithIndex(EsIndex index, ByteSizeValue expected, boolean keepAllFields) throws IOException { List allAttributes = Analyzer.mappingAsAttributes(randomSource(), index.mapping()); - List keepAttributes = keepAllFields ? allAttributes : List.of(allAttributes.get(0)); - EsRelation relation = new EsRelation(randomSource(), index, keepAttributes, IndexMode.STANDARD); + List keepAttributes = keepAllFields || allAttributes.isEmpty() ? 
allAttributes : List.of(allAttributes.getFirst()); + EsRelation relation = new EsRelation(randomSource(), index.name(), IndexMode.STANDARD, index.indexNameWithModes(), keepAttributes); Limit limit = new Limit(randomSource(), new Literal(randomSource(), 10, DataType.INTEGER), relation); Project project = new Project(randomSource(), limit, limit.output()); FragmentExec fragmentExec = new FragmentExec(project); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index e2eb05b0c14d3..6dc1eac2e5814 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -14,17 +14,14 @@ import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.SerializationTestUtils; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Expression; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.And; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Not; -import org.elasticsearch.xpack.esql.core.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.core.tree.Source; import org.elasticsearch.xpack.esql.core.type.DataType; import org.elasticsearch.xpack.esql.core.type.EsField; @@ -40,6 +37,9 @@ import 
org.elasticsearch.xpack.esql.expression.function.scalar.string.Length; import org.elasticsearch.xpack.esql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.esql.expression.function.scalar.string.Substring; +import org.elasticsearch.xpack.esql.expression.predicate.logical.And; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.esql.expression.predicate.logical.Or; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Add; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Mod; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java index af7a66fea9bb2..d217dafb7442a 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java @@ -16,10 +16,10 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.GrokBuiltinPatterns; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import java.util.Map; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index a1648c67d9bd4..4ef51d44b9b34 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; +import org.elasticsearch.compute.test.TestBlockFactory; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -30,7 +31,6 @@ import org.elasticsearch.index.mapper.MapperServiceTestCase; import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ContextIndexSearcher; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; import org.elasticsearch.xpack.esql.core.expression.Literal; @@ -85,7 +85,17 @@ public void testLuceneSourceOperatorHugeRowSize() throws IOException { int estimatedRowSize = randomEstimatedRowSize(estimatedRowSizeIsHuge); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( FoldContext.small(), - new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, null, null, estimatedRowSize) + new EsQueryExec( + Source.EMPTY, + index().name(), + IndexMode.STANDARD, + index().indexNameWithModes(), + List.of(), + null, + null, + null, + estimatedRowSize + ) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); LocalExecutionPlanner.DriverSupplier supplier = plan.driverFactories.get(0).driverSupplier(); @@ -101,7 +111,17 @@ public void testLuceneTopNSourceOperator() throws IOException { Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( FoldContext.small(), - new 
EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, limit, List.of(sort), estimatedRowSize) + new EsQueryExec( + Source.EMPTY, + index().name(), + IndexMode.STANDARD, + index().indexNameWithModes(), + List.of(), + null, + limit, + List.of(sort), + estimatedRowSize + ) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); LocalExecutionPlanner.DriverSupplier supplier = plan.driverFactories.get(0).driverSupplier(); @@ -117,7 +137,17 @@ public void testLuceneTopNSourceOperatorDistanceSort() throws IOException { Literal limit = new Literal(Source.EMPTY, 10, DataType.INTEGER); LocalExecutionPlanner.LocalExecutionPlan plan = planner().plan( FoldContext.small(), - new EsQueryExec(Source.EMPTY, index(), IndexMode.STANDARD, List.of(), null, limit, List.of(sort), estimatedRowSize) + new EsQueryExec( + Source.EMPTY, + index().name(), + IndexMode.STANDARD, + index().indexNameWithModes(), + List.of(), + null, + limit, + List.of(sort), + estimatedRowSize + ) ); assertThat(plan.driverFactories.size(), lessThanOrEqualTo(pragmas.taskConcurrency())); LocalExecutionPlanner.DriverSupplier supplier = plan.driverFactories.get(0).driverSupplier(); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index c5933f134f9a9..780045077f7b8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -31,6 +31,7 @@ import org.elasticsearch.compute.operator.OrdinalsGroupingOperator; import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; +import org.elasticsearch.compute.test.TestBlockFactory; import 
org.elasticsearch.core.Nullable; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; @@ -44,7 +45,6 @@ import org.elasticsearch.lucene.spatial.CoordinateEncoder; import org.elasticsearch.plugins.scanners.StablePluginsRegistry; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.core.expression.Attribute; import org.elasticsearch.xpack.esql.core.expression.FieldAttribute; import org.elasticsearch.xpack.esql.core.expression.FoldContext; diff --git a/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml index d826de8ca8725..ee6094d2ffef2 100644 --- a/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/identity-provider/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,5 @@ ALL-UNNAMED: - set_https_connection_properties # potentially required by apache.httpcomponents + - write_system_properties: + properties: + - org.apache.xml.security.ignoreLineBreaks diff --git a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java index d2be50cb5e841..610fafb8390da 100644 --- a/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java +++ b/x-pack/plugin/inference/qa/inference-service-tests/src/javaRestTest/java/org/elasticsearch/xpack/inference/InferenceCrudIT.java @@ -272,9 +272,9 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException { List services = getServices(TaskType.CHAT_COMPLETION); if 
((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { - assertThat(services.size(), equalTo(2)); + assertThat(services.size(), equalTo(3)); } else { - assertThat(services.size(), equalTo(1)); + assertThat(services.size(), equalTo(2)); } String[] providers = new String[services.size()]; @@ -283,7 +283,7 @@ public void testGetServicesWithChatCompletionTaskType() throws IOException { providers[i] = (String) serviceConfig.get("service"); } - var providerList = new ArrayList<>(List.of("openai")); + var providerList = new ArrayList<>(List.of("openai", "streaming_completion_test_service")); if ((ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled() || ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG.isEnabled())) { @@ -519,14 +519,19 @@ public void testSupportedStream() throws Exception { public void testUnifiedCompletionInference() throws Exception { String modelId = "streaming"; - putModel(modelId, mockCompletionServiceModelConfig(TaskType.COMPLETION)); + putModel(modelId, mockCompletionServiceModelConfig(TaskType.CHAT_COMPLETION)); var singleModel = getModel(modelId); assertEquals(modelId, singleModel.get("inference_id")); - assertEquals(TaskType.COMPLETION.toString(), singleModel.get("task_type")); + assertEquals(TaskType.CHAT_COMPLETION.toString(), singleModel.get("task_type")); var input = IntStream.range(1, 2 + randomInt(8)).mapToObj(i -> randomAlphanumericOfLength(5)).toList(); try { - var events = unifiedCompletionInferOnMockService(modelId, TaskType.COMPLETION, input, VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER); + var events = unifiedCompletionInferOnMockService( + modelId, + TaskType.CHAT_COMPLETION, + input, + VALIDATE_ELASTIC_PRODUCT_HEADER_CONSUMER + ); var expectedResponses = expectedResultsIterator(input); assertThat(events.size(), equalTo((input.size() + 1) * 2)); 
events.forEach(event -> { diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java index 89c79dd148598..1f17e335462a7 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestDenseInferenceServiceExtension.java @@ -257,7 +257,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( "model", - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription("") .setLabel("Model") .setRequired(true) .setSensitive(true) diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java index 77c762a38baaf..e79c8b9bad522 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestRerankingServiceExtension.java @@ -171,7 +171,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( "model", - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.RERANK)).setDescription("") .setLabel("Model") .setRequired(true) .setSensitive(true) diff --git 
a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java index bef0b1812beda..f700f6672fd63 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestSparseInferenceServiceExtension.java @@ -205,7 +205,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( "model", - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.SPARSE_EMBEDDING)).setDescription("") .setLabel("Model") .setRequired(true) .setSensitive(false) @@ -215,7 +215,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( "hidden_field", - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.SPARSE_EMBEDDING)).setDescription("") .setLabel("Hidden Field") .setRequired(true) .setSensitive(false) diff --git a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java index e071b704c233e..b0e43c8607078 100644 --- a/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java +++ b/x-pack/plugin/inference/qa/test-service-plugin/src/main/java/org/elasticsearch/xpack/inference/mock/TestStreamingCompletionServiceExtension.java @@ -55,9 +55,9 @@ public List getInferenceServiceFactories() { public static class 
TestInferenceService extends AbstractTestInferenceService { private static final String NAME = "streaming_completion_test_service"; - private static final Set supportedStreamingTasks = Set.of(TaskType.COMPLETION); + private static final Set supportedStreamingTasks = Set.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION); - private static final EnumSet supportedTaskTypes = EnumSet.of(TaskType.COMPLETION); + private static final EnumSet supportedTaskTypes = EnumSet.of(TaskType.COMPLETION, TaskType.CHAT_COMPLETION); public TestInferenceService(InferenceServiceExtension.InferenceServiceFactoryContext context) {} @@ -129,7 +129,7 @@ public void unifiedCompletionInfer( ActionListener listener ) { switch (model.getConfigurations().getTaskType()) { - case COMPLETION -> listener.onResponse(makeUnifiedResults(request)); + case CHAT_COMPLETION -> listener.onResponse(makeUnifiedResults(request)); default -> listener.onFailure( new ElasticsearchStatusException( TaskType.unsupportedTaskTypeErrorMsg(model.getConfigurations().getTaskType(), name()), @@ -257,7 +257,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( "model_id", - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.COMPLETION)).setDescription("") .setLabel("Model ID") .setRequired(true) .setSensitive(true) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index 6ba529cb66eaa..830eab8c42366 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -39,6 +39,7 @@ import org.elasticsearch.plugins.Plugin; import org.elasticsearch.plugins.SearchPlugin; import org.elasticsearch.plugins.SystemIndexPlugin; +import 
org.elasticsearch.plugins.internal.InternalSearchPlugin; import org.elasticsearch.plugins.internal.rewriter.QueryRewriteInterceptor; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; @@ -136,7 +137,14 @@ import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.DEPRECATED_ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; import static org.elasticsearch.xpack.inference.services.elastic.ElasticInferenceServiceFeature.ELASTIC_INFERENCE_SERVICE_FEATURE_FLAG; -public class InferencePlugin extends Plugin implements ActionPlugin, ExtensiblePlugin, SystemIndexPlugin, MapperPlugin, SearchPlugin { +public class InferencePlugin extends Plugin + implements + ActionPlugin, + ExtensiblePlugin, + SystemIndexPlugin, + MapperPlugin, + SearchPlugin, + InternalSearchPlugin { /** * When this setting is true the verification check that diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java index 1478130f6a6c8..9354ac2a83182 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionInferenceAction.java @@ -52,7 +52,7 @@ public TransportUnifiedCompletionInferenceAction( @Override protected boolean isInvalidTaskTypeForInferenceEndpoint(UnifiedCompletionAction.Request request, UnparsedModel unparsedModel) { - return request.getTaskType().isAnyOrSame(TaskType.COMPLETION) == false || unparsedModel.taskType() != TaskType.COMPLETION; + return request.getTaskType().isAnyOrSame(TaskType.CHAT_COMPLETION) == false || unparsedModel.taskType() != TaskType.CHAT_COMPLETION; } @Override @@ -64,7 +64,7 @@ protected 
ElasticsearchStatusException createInvalidTaskTypeException( "Incompatible task_type for unified API, the requested type [{}] must be one of [{}]", RestStatus.BAD_REQUEST, request.getTaskType(), - TaskType.COMPLETION.toString() + TaskType.CHAT_COMPLETION.toString() ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java index 4d730be6aa6bd..ca25b56953251 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/sender/OpenAiCompletionRequestManager.java @@ -26,7 +26,7 @@ public class OpenAiCompletionRequestManager extends OpenAiRequestManager { private static final Logger logger = LogManager.getLogger(OpenAiCompletionRequestManager.class); private static final ResponseHandler HANDLER = createCompletionHandler(); - static final String USER_ROLE = "user"; + public static final String USER_ROLE = "user"; public static OpenAiCompletionRequestManager of(OpenAiChatCompletionModel model, ThreadPool threadPool) { return new OpenAiCompletionRequestManager(Objects.requireNonNull(model), Objects.requireNonNull(threadPool)); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java index 24f7fa182b7c2..0fd0c281d8bc6 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java +++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchService.java @@ -379,7 +379,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( SERVICE_ID, - new SettingsConfiguration.Builder().setDescription("The name of the model service to use for the {infer} task.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model service to use for the {infer} task." + ) .setLabel("Project ID") .setRequired(true) .setSensitive(false) @@ -390,7 +392,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( HOST, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "The name of the host address used for the {infer} task. You can find the host address at " + "https://opensearch.console.aliyun.com/cn-shanghai/rag/api-key[ the API keys section] " + "of the documentation." @@ -405,7 +407,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( HTTP_SCHEMA_NAME, - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("HTTP Schema") .setRequired(true) .setSensitive(false) @@ -416,7 +418,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( WORKSPACE_NAME, - new SettingsConfiguration.Builder().setDescription("The name of the workspace used for the {infer} task.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the workspace used for the {infer} task." 
+ ) .setLabel("Workspace") .setRequired(true) .setSensitive(false) @@ -426,9 +430,12 @@ public static InferenceServiceConfiguration get() { ); configurationMap.putAll( - DefaultSecretSettings.toSettingsConfigurationWithDescription("A valid API key for the AlibabaCloud AI Search API.") + DefaultSecretSettings.toSettingsConfigurationWithDescription( + "A valid API key for the AlibabaCloud AI Search API.", + supportedTaskTypes + ) ); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java index 2105da235babe..80750063b120e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockSecretSettings.java @@ -18,11 +18,13 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.SettingsConfiguration; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -128,7 +130,9 @@ public static Map get() { var configurationMap = new HashMap(); configurationMap.put( ACCESS_KEY_FIELD, - new SettingsConfiguration.Builder().setDescription("A valid AWS access key that has 
permissions to use Amazon Bedrock.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION)).setDescription( + "A valid AWS access key that has permissions to use Amazon Bedrock." + ) .setLabel("Access Key") .setRequired(true) .setSensitive(true) @@ -138,7 +142,9 @@ public static Map get() { ); configurationMap.put( SECRET_KEY_FIELD, - new SettingsConfiguration.Builder().setDescription("A valid AWS secret key that is paired with the access_key.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION)).setDescription( + "A valid AWS secret key that is paired with the access_key." + ) .setLabel("Secret Key") .setRequired(true) .setSensitive(true) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java index 07c5e91776192..e13c668197a8f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockService.java @@ -378,7 +378,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( PROVIDER_FIELD, - new SettingsConfiguration.Builder().setDescription("The model provider for your deployment.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The model provider for your deployment.") .setLabel("Provider") .setRequired(true) .setSensitive(false) @@ -389,7 +389,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_FIELD, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "The base model ID or an ARN to a custom model based on a foundational model." 
) .setLabel("Model") @@ -402,7 +402,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( REGION_FIELD, - new SettingsConfiguration.Builder().setDescription("The region that your model or ARN is deployed in.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The region that your model or ARN is deployed in." + ) .setLabel("Region") .setRequired(true) .setSensitive(false) @@ -414,7 +416,8 @@ public static InferenceServiceConfiguration get() { configurationMap.putAll(AmazonBedrockSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( - "By default, the amazonbedrock service sets the number of requests allowed per minute to 240." + "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", + supportedTaskTypes ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java index 9dbfb0732f463..64fe42fbbc171 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicService.java @@ -256,7 +256,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("The name of the model to use for the inference task.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." 
+ ) .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -265,10 +267,11 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( - "By default, the anthropic service sets the number of requests allowed per minute to 50." + "By default, the anthropic service sets the number of requests allowed per minute to 50.", + supportedTaskTypes ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java index 649540f7efc5c..88d5b54398d06 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioService.java @@ -406,7 +406,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( TARGET_FIELD, - new SettingsConfiguration.Builder().setDescription("The target URL of your Azure AI Studio model deployment.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The target URL of your Azure AI Studio model deployment." + ) .setLabel("Target") .setRequired(true) .setSensitive(false) @@ -417,7 +419,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( ENDPOINT_TYPE_FIELD, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "Specifies the type of endpoint that is used in your model deployment." 
) .setLabel("Endpoint Type") @@ -430,7 +432,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( PROVIDER_FIELD, - new SettingsConfiguration.Builder().setDescription("The model provider for your deployment.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The model provider for your deployment.") .setLabel("Provider") .setRequired(true) .setSensitive(false) @@ -439,8 +441,8 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java index 0601daf562ce9..8405b35a35d9a 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiSecretSettings.java @@ -18,11 +18,13 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.SettingsConfiguration; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Objects; 
@@ -146,7 +148,9 @@ public static Map get() { var configurationMap = new HashMap(); configurationMap.put( API_KEY, - new SettingsConfiguration.Builder().setDescription("You must provide either an API key or an Entra ID.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION)).setDescription( + "You must provide either an API key or an Entra ID." + ) .setLabel("API Key") .setRequired(false) .setSensitive(true) @@ -156,7 +160,9 @@ public static Map get() { ); configurationMap.put( ENTRA_ID, - new SettingsConfiguration.Builder().setDescription("You must provide either an API key or an Entra ID.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.COMPLETION)).setDescription( + "You must provide either an API key or an Entra ID." + ) .setLabel("Entra ID") .setRequired(false) .setSensitive(true) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java index 4fca5a460a12a..5b622d68f2c25 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiService.java @@ -351,7 +351,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( RESOURCE_NAME, - new SettingsConfiguration.Builder().setDescription("The name of your Azure OpenAI resource.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The name of your Azure OpenAI resource.") .setLabel("Resource Name") .setRequired(true) .setSensitive(false) @@ -362,7 +362,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( API_VERSION, - new SettingsConfiguration.Builder().setDescription("The Azure API version ID to use.") + new 
SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The Azure API version ID to use.") .setLabel("API Version") .setRequired(true) .setSensitive(false) @@ -373,7 +373,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( DEPLOYMENT_ID, - new SettingsConfiguration.Builder().setDescription("The deployment name of your deployed models.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The deployment name of your deployed models.") .setLabel("Deployment ID") .setRequired(true) .setSensitive(false) @@ -385,7 +385,8 @@ public static InferenceServiceConfiguration get() { configurationMap.putAll(AzureOpenAiSecretSettings.Configuration.get()); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( - "The azureopenai service sets a default number of requests allowed per minute depending on the task type." + "The azureopenai service sets a default number of requests allowed per minute depending on the task type.", + supportedTaskTypes ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 60ab8ca68d5d9..60326a8a34ca3 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -363,8 +363,8 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new 
InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java index cb554cf288121..a8a0053796e8c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceService.java @@ -251,7 +251,7 @@ private static ElasticInferenceServiceModel createModel( eisServiceComponents, context ); - case COMPLETION -> new ElasticInferenceServiceCompletionModel( + case CHAT_COMPLETION -> new ElasticInferenceServiceCompletionModel( inferenceEntityId, taskType, NAME, @@ -379,7 +379,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("The name of the model to use for the inference task.") + new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDescription( + "The name of the model to use for the inference task." + ) .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -390,7 +392,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MAX_INPUT_TOKENS, - new SettingsConfiguration.Builder().setDescription("Allows you to specify the maximum number of tokens per input.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.SPARSE_EMBEDDING)).setDescription( + "Allows you to specify the maximum number of tokens per input." 
+ ) .setLabel("Maximum Input Tokens") .setRequired(false) .setSensitive(false) @@ -399,7 +403,7 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(SUPPORTED_TASK_TYPES_FOR_SERVICES_API)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java index 2931f2e23f12d..c538b9acf1321 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalService.java @@ -1147,7 +1147,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDefaultValue(MULTILINGUAL_E5_SMALL_MODEL_ID) + new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(MULTILINGUAL_E5_SMALL_MODEL_ID) .setDescription("The name of the model to use for the inference task.") .setLabel("Model ID") .setRequired(true) @@ -1159,7 +1159,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( NUM_ALLOCATIONS, - new SettingsConfiguration.Builder().setDefaultValue(1) + new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(1) .setDescription("The total number of allocations this model is assigned across machine learning nodes.") .setLabel("Number Allocations") .setRequired(true) @@ -1171,7 +1171,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( NUM_THREADS, - new SettingsConfiguration.Builder().setDefaultValue(2) + 
new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue(2) .setDescription("Sets the number of threads used by each model allocation during inference.") .setLabel("Number Threads") .setRequired(true) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java index 1dbf2ca3e2dad..205cc545a23f0 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioService.java @@ -351,7 +351,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("ID of the LLM you're using.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("ID of the LLM you're using.") .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -360,8 +360,8 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java index b185800ed75f4..9a39e200368cf 100644 --- 
a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiSecretSettings.java @@ -18,11 +18,13 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.SettingsConfiguration; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -122,7 +124,9 @@ public static Map get() { var configurationMap = new HashMap(); configurationMap.put( SERVICE_ACCOUNT_JSON, - new SettingsConfiguration.Builder().setDescription("API Key for the provider you're connecting to.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING, TaskType.RERANK)).setDescription( + "API Key for the provider you're connecting to." 
+ ) .setLabel("Credentials JSON") .setRequired(true) .setSensitive(true) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java index 8fe9f29c73747..55397b2398d39 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiService.java @@ -329,7 +329,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("ID of the LLM you're using.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("ID of the LLM you're using.") .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -340,7 +340,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( LOCATION, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "Please provide the GCP region where the Vertex AI API(s) is enabled. " + "For more information, refer to the {geminiVertexAIDocs}." ) @@ -354,7 +354,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( PROJECT_ID, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "The GCP Project ID which has Vertex AI API(s) enabled. For more information " + "on the URL, refer to the {geminiVertexAIDocs}." 
) @@ -367,7 +367,7 @@ public static InferenceServiceConfiguration get() { ); configurationMap.putAll(GoogleVertexAiSecretSettings.Configuration.get()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java index ef6beb8ec2627..73c1446b9bb26 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -181,7 +181,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( URL, - new SettingsConfiguration.Builder().setDefaultValue("https://api.openai.com/v1/embeddings") + new SettingsConfiguration.Builder(supportedTaskTypes).setDefaultValue("https://api.openai.com/v1/embeddings") .setDescription("The URL endpoint to use for the requests.") .setLabel("URL") .setRequired(true) @@ -191,8 +191,8 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java index 52d42f570a413..9c76cc5c41fb1 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserService.java @@ -176,7 +176,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( URL, - new SettingsConfiguration.Builder().setDescription("The URL endpoint to use for the requests.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The URL endpoint to use for the requests.") .setLabel("URL") .setRequired(true) .setSensitive(false) @@ -185,8 +185,8 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java index dd368f88a993c..477225f00d22b 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxService.java @@ -324,7 +324,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( API_VERSION, - new 
SettingsConfiguration.Builder().setDescription("The IBM Watsonx API version ID to use.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("The IBM Watsonx API version ID to use.") .setLabel("API Version") .setRequired(true) .setSensitive(false) @@ -335,7 +335,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( PROJECT_ID, - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("Project ID") .setRequired(true) .setSensitive(false) @@ -346,7 +346,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("The name of the model to use for the inference task.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "The name of the model to use for the inference task." + ) .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -357,7 +359,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( URL, - new SettingsConfiguration.Builder().setDescription("") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription("") .setLabel("URL") .setRequired(true) .setSensitive(false) @@ -368,7 +370,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MAX_INPUT_TOKENS, - new SettingsConfiguration.Builder().setDescription("Allows you to specify the maximum number of tokens per input.") + new SettingsConfiguration.Builder(EnumSet.of(TaskType.TEXT_EMBEDDING)).setDescription( + "Allows you to specify the maximum number of tokens per input." 
+ ) .setLabel("Maximum Input Tokens") .setRequired(false) .setSensitive(false) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java index ed76df5875562..7ad70fc88054d 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIService.java @@ -339,8 +339,8 @@ public static InferenceServiceConfiguration get() { () -> { var configurationMap = new HashMap(); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java index 129d9023a1ebc..3e40575e42faf 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/mistral/MistralService.java @@ -318,7 +318,7 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_FIELD, - new SettingsConfiguration.Builder().setDescription( + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( "Refer to the Mistral models documentation for the list of available text embedding models." 
) .setLabel("Model") @@ -331,7 +331,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MAX_INPUT_TOKENS, - new SettingsConfiguration.Builder().setDescription("Allows you to specify the maximum number of tokens per input.") + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription( + "Allows you to specify the maximum number of tokens per input." + ) .setLabel("Maximum Input Tokens") .setRequired(false) .setSensitive(false) @@ -340,8 +342,8 @@ public static InferenceServiceConfiguration get() { .build() ); - configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration()); - configurationMap.putAll(RateLimitSettings.toSettingsConfiguration()); + configurationMap.putAll(DefaultSecretSettings.toSettingsConfiguration(supportedTaskTypes)); + configurationMap.putAll(RateLimitSettings.toSettingsConfiguration(supportedTaskTypes)); return new InferenceServiceConfiguration.Builder().setService(NAME) .setName(SERVICE_NAME) diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 3efd7c44c3e97..0ce5bc801b59f 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -415,7 +415,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( MODEL_ID, - new SettingsConfiguration.Builder().setDescription("The name of the model to use for the inference task.") + new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDescription( + "The name of the model to use for the inference task." 
+ ) .setLabel("Model ID") .setRequired(true) .setSensitive(false) @@ -426,7 +428,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( ORGANIZATION, - new SettingsConfiguration.Builder().setDescription("The unique identifier of your organization.") + new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDescription( + "The unique identifier of your organization." + ) .setLabel("Organization ID") .setRequired(false) .setSensitive(false) @@ -437,7 +441,9 @@ public static InferenceServiceConfiguration get() { configurationMap.put( URL, - new SettingsConfiguration.Builder().setDefaultValue("https://api.openai.com/v1/chat/completions") + new SettingsConfiguration.Builder(SUPPORTED_TASK_TYPES_FOR_SERVICES_API).setDefaultValue( + "https://api.openai.com/v1/chat/completions" + ) .setDescription( "The OpenAI API endpoint URL. For more information on the URL, refer to the " + "https://platform.openai.com/docs/api-reference." @@ -453,12 +459,14 @@ public static InferenceServiceConfiguration get() { configurationMap.putAll( DefaultSecretSettings.toSettingsConfigurationWithDescription( "The OpenAI API authentication key. For more details about generating OpenAI API keys, " - + "refer to the https://platform.openai.com/account/api-keys." + + "refer to the https://platform.openai.com/account/api-keys.", + SUPPORTED_TASK_TYPES_FOR_SERVICES_API ) ); configurationMap.putAll( RateLimitSettings.toSettingsConfigurationWithDescription( - "Default number of requests allowed per minute. For text_embedding is 3000. For completion is 500." + "Default number of requests allowed per minute. For text_embedding is 3000. 
For completion is 500.", + SUPPORTED_TASK_TYPES_FOR_SERVICES_API ) ); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java index 15e8128969ddb..d076c946889ed 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/DefaultSecretSettings.java @@ -17,10 +17,12 @@ import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.SecretSettings; import org.elasticsearch.inference.SettingsConfiguration; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.xcontent.XContentBuilder; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -51,11 +53,14 @@ public static DefaultSecretSettings fromMap(@Nullable Map map) { return new DefaultSecretSettings(secureApiToken); } - public static Map toSettingsConfigurationWithDescription(String description) { + public static Map toSettingsConfigurationWithDescription( + String description, + EnumSet supportedTaskTypes + ) { var configurationMap = new HashMap(); configurationMap.put( API_KEY, - new SettingsConfiguration.Builder().setDescription(description) + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription(description) .setLabel("API Key") .setRequired(true) .setSensitive(true) @@ -66,8 +71,11 @@ public static Map toSettingsConfigurationWithDesc return configurationMap; } - public static Map toSettingsConfiguration() { - return DefaultSecretSettings.toSettingsConfigurationWithDescription("API Key for the provider you're connecting to."); + public static Map 
toSettingsConfiguration(EnumSet supportedTaskTypes) { + return DefaultSecretSettings.toSettingsConfigurationWithDescription( + "API Key for the provider you're connecting to.", + supportedTaskTypes + ); } public DefaultSecretSettings { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java index 30147a6d24a96..bc7e555120286 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/settings/RateLimitSettings.java @@ -12,12 +12,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.inference.SettingsConfiguration; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.inference.configuration.SettingsConfigurationFieldType; import org.elasticsearch.xcontent.ToXContentFragment; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xpack.inference.services.ConfigurationParseContext; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -51,11 +53,14 @@ public static RateLimitSettings of( return requestsPerMinute == null ? defaultValue : new RateLimitSettings(requestsPerMinute); } - public static Map toSettingsConfigurationWithDescription(String description) { + public static Map toSettingsConfigurationWithDescription( + String description, + EnumSet supportedTaskTypes + ) { var configurationMap = new HashMap(); configurationMap.put( FIELD_NAME + "." 
+ REQUESTS_PER_MINUTE_FIELD, - new SettingsConfiguration.Builder().setDescription(description) + new SettingsConfiguration.Builder(supportedTaskTypes).setDescription(description) .setLabel("Rate Limit") .setRequired(false) .setSensitive(false) @@ -66,8 +71,8 @@ public static Map toSettingsConfigurationWithDesc return configurationMap; } - public static Map toSettingsConfiguration() { - return RateLimitSettings.toSettingsConfigurationWithDescription("Minimize the number of rate limit errors."); + public static Map toSettingsConfiguration(EnumSet supportedTaskTypes) { + return RateLimitSettings.toSettingsConfigurationWithDescription("Minimize the number of rate limit errors.", supportedTaskTypes); } /** diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java index b5bf77cbb3c7d..e4de3d6beb800 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilder.java @@ -23,6 +23,9 @@ public static ModelValidator buildModelValidator(TaskType taskType) { case COMPLETION -> { return new ChatCompletionModelValidator(new SimpleServiceIntegrationValidator()); } + case CHAT_COMPLETION -> { + return new ChatCompletionModelValidator(new SimpleChatCompletionServiceIntegrationValidator()); + } case SPARSE_EMBEDDING, RERANK, ANY -> { return new SimpleModelValidator(new SimpleServiceIntegrationValidator()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidator.java 
new file mode 100644 index 0000000000000..1092d84a6ef6b --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidator.java @@ -0,0 +1,59 @@ + +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.validation; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.elasticsearch.xpack.inference.external.http.sender.UnifiedChatInput; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.external.http.sender.OpenAiCompletionRequestManager.USER_ROLE; + +/** + * This class uses the unified chat completion method to perform validation. 
+ */ +public class SimpleChatCompletionServiceIntegrationValidator implements ServiceIntegrationValidator { + private static final List TEST_INPUT = List.of("how big"); + + @Override + public void validate(InferenceService service, Model model, ActionListener listener) { + var chatCompletionInput = new UnifiedChatInput(TEST_INPUT, USER_ROLE, false); + service.unifiedCompletionInfer( + model, + chatCompletionInput.getRequest(), + InferenceAction.Request.DEFAULT_TIMEOUT, + ActionListener.wrap(r -> { + if (r != null) { + listener.onResponse(r); + } else { + listener.onFailure( + new ElasticsearchStatusException( + "Could not complete inference endpoint creation as validation call to service returned null response.", + RestStatus.BAD_REQUEST + ) + ); + } + }, e -> { + listener.onFailure( + new ElasticsearchStatusException( + "Could not complete inference endpoint creation as validation call to service threw an exception.", + RestStatus.BAD_REQUEST, + e + ) + ); + }) + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java index a723e5a9dffdf..c0fc818e421d0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/BaseTransportInferenceActionTestCase.java @@ -57,11 +57,15 @@ public abstract class BaseTransportInferenceActionTestCase action; protected static final String serviceId = "serviceId"; - protected static final TaskType taskType = TaskType.COMPLETION; + protected final TaskType taskType; protected static final String inferenceId = "inferenceEntityId"; protected InferenceServiceRegistry serviceRegistry; protected InferenceStats inferenceStats; + public BaseTransportInferenceActionTestCase(TaskType 
taskType) { + this.taskType = taskType; + } + @Before public void setUp() throws Exception { super.setUp(); @@ -377,7 +381,7 @@ protected void mockModelAndServiceRegistry(InferenceService service) { when(serviceRegistry.getService(any())).thenReturn(Optional.of(service)); } - protected void mockValidLicenseState(){ + protected void mockValidLicenseState() { when(licenseState.isAllowed(InferencePlugin.INFERENCE_API_FEATURE)).thenReturn(true); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java index a5efe04c22c04..c303e029cb415 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportInferenceActionTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.inference.InferenceServiceRegistry; +import org.elasticsearch.inference.TaskType; import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.inference.action.InferenceAction; @@ -20,6 +21,10 @@ public class TransportInferenceActionTests extends BaseTransportInferenceActionTestCase { + public TransportInferenceActionTests() { + super(TaskType.COMPLETION); + } + @Override protected BaseTransportInferenceAction createAction( TransportService transportService, diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java index 3856a3d111b6e..e8e7d9ac21bed 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/action/TransportUnifiedCompletionActionTests.java @@ -33,6 +33,10 @@ public class TransportUnifiedCompletionActionTests extends BaseTransportInferenceActionTestCase { + public TransportUnifiedCompletionActionTests() { + super(TaskType.CHAT_COMPLETION); + } + @Override protected BaseTransportInferenceAction createAction( TransportService transportService, @@ -71,7 +75,7 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingATextEmbeddingInfe assertThat(e, isA(ElasticsearchStatusException.class)); assertThat( e.getMessage(), - is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [completion]") + is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); })); @@ -96,7 +100,7 @@ public void testThrows_IncompatibleTaskTypeException_WhenUsingRequestIsAny_Model assertThat(e, isA(ElasticsearchStatusException.class)); assertThat( e.getMessage(), - is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [completion]") + is("Incompatible task_type for unified API, the requested type [" + requestTaskType + "] must be one of [chat_completion]") ); assertThat(((ElasticsearchStatusException) e).status(), is(RestStatus.BAD_REQUEST)); })); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index fe076eb721ea2..8ea7e6c2bdb8d 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -44,7 +44,7 @@ import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionRequestTaskSettingsTests.getChatCompletionRequestTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModelTests.createModel; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsRequestTaskSettingsTests.createRequestTaskSettingsMap; @@ -325,7 +325,7 @@ public void testCreate_OpenAiChatCompletionModel() throws IOException { webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + var model = createCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); @@ -389,7 +389,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutUser() throws IOExceptio 
webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = createChatCompletionModel(getUrl(webServer), "org", "secret", "model", null); + var model = createCompletionModel(getUrl(webServer), "org", "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap(null); var action = actionCreator.create(model, overriddenTaskSettings); @@ -452,7 +452,7 @@ public void testCreate_OpenAiChatCompletionModel_WithoutOrganization() throws IO webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var model = createCompletionModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); @@ -521,7 +521,7 @@ public void testCreate_OpenAiChatCompletionModel_FailsFromInvalidResponseFormat( """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var model = createChatCompletionModel(getUrl(webServer), null, "secret", "model", null); + var model = createCompletionModel(getUrl(webServer), null, "secret", "model", null); var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); var overriddenTaskSettings = getChatCompletionRequestTaskSettingsMap("overridden_user"); var action = actionCreator.create(model, overriddenTaskSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java index 
ba74d2ab42c21..e248f77fe7728 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiChatCompletionActionTests.java @@ -51,7 +51,7 @@ import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.ChatCompletionResultsTests.buildExpectationCompletion; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -284,7 +284,7 @@ public void testExecute_ThrowsException_WhenInputIsGreaterThanOne() throws IOExc } private ExecutableAction createAction(String url, String org, String apiKey, String modelName, @Nullable String user, Sender sender) { - var model = createChatCompletionModel(url, org, apiKey, modelName, user); + var model = createCompletionModel(url, org, apiKey, modelName, user); var requestCreator = OpenAiCompletionRequestManager.of(model, threadPool); var errorMessage = constructFailedToSendRequestMessage(model.getServiceSettings().uri(), "OpenAI chat completions"); return new SingleInputSenderExecutableAction(sender, requestCreator, errorMessage, "OpenAI chat completions"); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests.java index 15b4898650784..068e84fae35df 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/elastic/ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests.java @@ -21,7 +21,7 @@ import java.util.ArrayList; import static org.elasticsearch.xpack.inference.Utils.assertJsonEquals; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; public class ElasticInferenceServiceUnifiedChatCompletionRequestEntityTests extends ESTestCase { @@ -40,7 +40,7 @@ public void testModelUserFieldsSerialization() throws IOException { var unifiedRequest = UnifiedCompletionRequest.of(messageList); UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); + OpenAiChatCompletionModel model = createCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java index b0c58f3e94af8..ee0cdad5d552e 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestEntityTests.java @@ -20,7 +20,7 @@ import java.util.ArrayList; import static org.elasticsearch.xpack.inference.Utils.assertJsonEquals; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; public class OpenAiUnifiedChatCompletionRequestEntityTests extends ESTestCase { @@ -40,7 +40,7 @@ public void testModelUserFieldsSerialization() throws IOException { var unifiedRequest = UnifiedCompletionRequest.of(messageList); UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", USER); + OpenAiChatCompletionModel model = createCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", USER); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestTests.java index 2be12c9b12e0b..ec4231bd73154 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiUnifiedChatCompletionRequestTests.java @@ 
-150,7 +150,7 @@ public static OpenAiUnifiedChatCompletionRequest createRequest( @Nullable String user, boolean stream ) { - var chatCompletionModel = OpenAiChatCompletionModelTests.createChatCompletionModel(url, org, apiKey, model, user); + var chatCompletionModel = OpenAiChatCompletionModelTests.createCompletionModel(url, org, apiKey, model, user); return new OpenAiUnifiedChatCompletionRequest(new UnifiedChatInput(List.of(input), "user", stream), chatCompletionModel); } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/unified/UnifiedChatCompletionRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/unified/UnifiedChatCompletionRequestEntityTests.java index d9388cab0e1ec..024b7aa532d90 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/unified/UnifiedChatCompletionRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/unified/UnifiedChatCompletionRequestEntityTests.java @@ -28,7 +28,7 @@ import java.util.Random; import static org.elasticsearch.xpack.inference.Utils.assertJsonEquals; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; public class UnifiedChatCompletionRequestEntityTests extends ESTestCase { @@ -46,7 +46,7 @@ public void testBasicSerialization() throws IOException { UnifiedCompletionRequest unifiedRequest = new UnifiedCompletionRequest(messageList, null, null, null, null, null, null, null); UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); + OpenAiChatCompletionModel model = 
createCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -111,7 +111,7 @@ public void testSerializationWithAllFields() throws IOException { UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -204,7 +204,7 @@ public void testSerializationWithNullOptionalFields() throws IOException { UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -253,7 +253,7 @@ public void testSerializationWithEmptyLists() throws IOException { UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -334,7 +334,7 @@ public void testSerializationWithNestedObjects() throws IOException { UnifiedChatInput unifiedChatInput = new 
UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", randomModel, null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", randomModel, null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -452,7 +452,7 @@ public void testSerializationWithDifferentContentTypes() throws IOException { UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -516,7 +516,7 @@ public void testSerializationWithSpecialCharacters() throws IOException { UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); @@ -574,7 +574,7 @@ public void testSerializationWithBooleanFields() throws IOException { null // topP ); - OpenAiChatCompletionModel model = createChatCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); + OpenAiChatCompletionModel model = createCompletionModel("test-endpoint", "organizationId", "api-key", "model-name", null); UnifiedChatInput unifiedChatInputTrue = new UnifiedChatInput(unifiedRequest, true); OpenAiUnifiedChatCompletionRequestEntity 
entityTrue = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInputTrue, model); @@ -642,7 +642,7 @@ public void testSerializationWithoutContentField() throws IOException { UnifiedCompletionRequest unifiedRequest = new UnifiedCompletionRequest(messageList, null, null, null, null, null, null, null); UnifiedChatInput unifiedChatInput = new UnifiedChatInput(unifiedRequest, true); - OpenAiChatCompletionModel model = createChatCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); + OpenAiChatCompletionModel model = createCompletionModel("test-url", "organizationId", "api-key", "test-endpoint", null); OpenAiUnifiedChatCompletionRequestEntity entity = new OpenAiUnifiedChatCompletionRequestEntity(unifiedChatInput, model); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java index e2cb93f731162..92544d5535acb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/alibabacloudsearch/AlibabaCloudSearchServiceTests.java @@ -331,7 +331,7 @@ public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IO var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new AlibabaCloudSearchService(senderFactory, createWithEmptySettings(threadPool))) { - var model = OpenAiChatCompletionModelTests.createChatCompletionModel( + var model = OpenAiChatCompletionModelTests.createCompletionModel( randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), @@ -448,7 +448,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, 
"updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] }, "api_key": { "description": "A valid API key for the AlibabaCloud AI Search API.", @@ -456,7 +457,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] }, "service_id": { "description": "The name of the model service to use for the {infer} task.", @@ -464,7 +466,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] }, "host": { "description": "The name of the host address used for the {infer} task. You can find the host address at https://opensearch.console.aliyun.com/cn-shanghai/rag/api-key[ the API keys section] of the documentation.", @@ -472,7 +475,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -480,7 +484,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] }, "http_schema": { "description": "", @@ -488,7 +493,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank", "completion"] } } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java index ce4d928458dca..c11d4b4c7923d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/amazonbedrock/AmazonBedrockServiceTests.java @@ -166,7 +166,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "provider": { "description": "The model provider for your deployment.", @@ -174,7 +175,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "access_key": { "description": "A valid AWS access key that has permissions to use Amazon Bedrock.", @@ -182,7 +184,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "model": { "description": "The base model ID or an ARN to a custom model based on a foundational model.", @@ -190,7 +193,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "rate_limit.requests_per_minute": { "description": "By default, the amazonbedrock service sets the number of requests allowed per minute to 240.", @@ -198,7 +202,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": 
false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "completion"] }, "region": { "description": "The region that your model or ARN is deployed in.", @@ -206,7 +211,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java index 7eb7ad1d0a19e..33101a3e02661 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/anthropic/AnthropicServiceTests.java @@ -614,7 +614,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["completion"] }, "rate_limit.requests_per_minute": { "description": "By default, the anthropic service sets the number of requests allowed per minute to 50.", @@ -622,7 +623,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["completion"] }, "model_id": { "description": "The name of the model to use for the inference task.", @@ -630,7 +632,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["completion"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java index 72ebd0b96bdc1..d2e4652b96488 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureaistudio/AzureAiStudioServiceTests.java @@ -1401,7 +1401,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "provider": { "description": "The model provider for your deployment.", @@ -1409,7 +1410,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "api_key": { "description": "API Key for the provider you're connecting to.", @@ -1417,7 +1419,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -1425,7 +1428,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "completion"] }, "target": { "description": "The target URL of your Azure AI Studio model deployment.", @@ -1433,7 +1437,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] } } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java index 5c69815cbb0ab..52527d74aad19 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/azureopenai/AzureOpenAiServiceTests.java @@ -1470,7 +1470,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "entra_id": { "description": "You must provide either an API key or an Entra ID.", @@ -1478,7 +1479,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "rate_limit.requests_per_minute": { "description": "The azureopenai service sets a default number of requests allowed per minute depending on the task type.", @@ -1486,7 +1488,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "completion"] }, "deployment_id": { "description": "The deployment name of your deployed models.", @@ -1494,7 +1497,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "resource_name": { "description": "The name of your Azure OpenAI resource.", @@ -1502,7 +1506,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - 
"type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "api_version": { "description": "The Azure API version ID to use.", @@ -1510,7 +1515,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index dbad76fd46fc4..86b3edc4130da 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1645,7 +1645,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank", "completion"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -1653,7 +1654,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "rerank", "completion"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java index 89bf1355ee767..5e7e93b1f5a75 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elastic/ElasticInferenceServiceTests.java @@ -582,7 +582,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["sparse_embedding" , "chat_completion"] }, "model_id": { "description": "The name of the model to use for the inference task.", @@ -590,7 +591,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["sparse_embedding" , "chat_completion"] }, "max_input_tokens": { "description": "Allows you to specify the maximum number of tokens per input.", @@ -598,7 +600,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["sparse_embedding"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java index 803151a5b3476..93b884a87fba2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/elasticsearch/ElasticsearchInternalServiceTests.java @@ -1564,7 +1564,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": true, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank"] }, "num_threads": { "default_value": 2, @@ -1573,7 +1574,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": 
false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank"] }, "model_id": { "default_value": ".multilingual-e5-small", @@ -1582,7 +1584,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding", "rerank"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java index aa45666eb0fb1..26dae5d172fb0 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googleaistudio/GoogleAiStudioServiceTests.java @@ -1133,7 +1133,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -1141,7 +1142,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "completion"] }, "model_id": { "description": "ID of the LLM you're using.", @@ -1149,7 +1151,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion"] } } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java index 555e9f0785fa2..932dfc21e9396 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/googlevertexai/GoogleVertexAiServiceTests.java @@ -879,7 +879,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] }, "project_id": { "description": "The GCP Project ID which has Vertex AI API(s) enabled. For more information on the URL, refer to the {geminiVertexAIDocs}.", @@ -887,7 +888,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] }, "location": { "description": "Please provide the GCP region where the Vertex AI API(s) is enabled. 
For more information, refer to the {geminiVertexAIDocs}.", @@ -895,7 +897,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -903,7 +906,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "rerank"] }, "model_id": { "description": "ID of the LLM you're using.", @@ -911,7 +915,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java index 0b774badd56b6..53e7c6c25fd47 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceElserServiceTests.java @@ -150,7 +150,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["sparse_embedding"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -158,7 +159,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["sparse_embedding"] }, "url": { 
"description": "The URL endpoint to use for the requests.", @@ -166,7 +168,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["sparse_embedding"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index 1399712450af1..f3137d7011cec 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -868,7 +868,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -876,7 +877,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "sparse_embedding"] }, "url": { "default_value": "https://api.openai.com/v1/embeddings", @@ -885,7 +887,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "sparse_embedding"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java index 6e744fffbff41..ff99101fc4ee5 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ibmwatsonx/IbmWatsonxServiceTests.java @@ -925,7 +925,7 @@ public void testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IO var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new IbmWatsonxServiceWithoutAuth(senderFactory, createWithEmptySettings(threadPool))) { - var model = OpenAiChatCompletionModelTests.createChatCompletionModel( + var model = OpenAiChatCompletionModelTests.createCompletionModel( randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), @@ -985,7 +985,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] }, "model_id": { "description": "The name of the model to use for the inference task.", @@ -993,7 +994,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] }, "api_version": { "description": "The IBM Watsonx API version ID to use.", @@ -1001,7 +1003,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] }, "max_input_tokens": { "description": "Allows you to specify the maximum number of tokens per input.", @@ -1009,7 +1012,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding"] }, "url": { "description": "", @@ -1017,7 +1021,8 @@ public void testGetConfiguration() throws Exception { "required": 
true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java index 6e68bde6a1266..5fa14da4ba733 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/jinaai/JinaAIServiceTests.java @@ -1843,7 +1843,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "rerank"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -1851,7 +1852,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "rerank"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java index 395e29240dfd4..95ac2cde0e31b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/mistral/MistralServiceTests.java @@ -758,7 +758,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] }, "model": { "description": "Refer to the Mistral models documentation 
for the list of available text embedding models.", @@ -766,7 +767,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding"] }, "rate_limit.requests_per_minute": { "description": "Minimize the number of rate limit errors.", @@ -774,7 +776,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding"] }, "max_input_tokens": { "description": "Allows you to specify the maximum number of tokens per input.", @@ -782,7 +785,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding"] } } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index da912cd6e5d14..6fddbf4450283 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -74,7 +74,7 @@ import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectationFloat; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; -import static org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createChatCompletionModel; +import static 
org.elasticsearch.xpack.inference.services.openai.completion.OpenAiChatCompletionModelTests.createCompletionModel; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsServiceSettingsTests.getServiceSettingsMap; import static org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettingsTests.getTaskSettingsMap; import static org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettingsTests.getSecretSettingsMap; @@ -1084,16 +1084,16 @@ public void testInfer_StreamRequest() throws Exception { """; webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); - var result = streamChatCompletion(); + var result = streamCompletion(); InferenceEventsAssertion.assertThat(result).hasFinishedStream().hasNoErrors().hasEvent(""" {"completion":[{"delta":"hello, world"}]}"""); } - private InferenceServiceResults streamChatCompletion() throws IOException { + private InferenceServiceResults streamCompletion() throws IOException { var senderFactory = HttpRequestSenderTests.createSenderFactory(threadPool, clientManager); try (var service = new OpenAiService(senderFactory, createWithEmptySettings(threadPool))) { - var model = OpenAiChatCompletionModelTests.createChatCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); + var model = OpenAiChatCompletionModelTests.createCompletionModel(getUrl(webServer), "org", "secret", "model", "user"); PlainActionFuture listener = new PlainActionFuture<>(); service.infer( model, @@ -1122,7 +1122,7 @@ public void testInfer_StreamRequest_ErrorResponse() throws Exception { }"""; webServer.enqueue(new MockResponse().setResponseCode(401).setBody(responseJson)); - var result = streamChatCompletion(); + var result = streamCompletion(); InferenceEventsAssertion.assertThat(result) .hasFinishedStream() @@ -1527,7 +1527,7 @@ public void testCheckModelConfig_ReturnsNewModelReference_DoesNotOverrideSimilar public void 
testUpdateModelWithEmbeddingDetails_InvalidModelProvided() throws IOException { try (var service = createOpenAiService()) { - var model = createChatCompletionModel( + var model = createCompletionModel( randomAlphaOfLength(10), randomAlphaOfLength(10), randomAlphaOfLength(10), @@ -1749,7 +1749,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": true, "updatable": true, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, "organization_id": { "description": "The unique identifier of your organization.", @@ -1757,7 +1758,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, "rate_limit.requests_per_minute": { "description": "Default number of requests allowed per minute. For text_embedding is 3000. For completion is 500.", @@ -1765,7 +1767,8 @@ public void testGetConfiguration() throws Exception { "required": false, "sensitive": false, "updatable": false, - "type": "int" + "type": "int", + "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, "model_id": { "description": "The name of the model to use for the inference task.", @@ -1773,7 +1776,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion", "chat_completion"] }, "url": { "default_value": "https://api.openai.com/v1/chat/completions", @@ -1782,7 +1786,8 @@ public void testGetConfiguration() throws Exception { "required": true, "sensitive": false, "updatable": false, - "type": "str" + "type": "str", + "supported_task_types": ["text_embedding", "completion", "chat_completion"] } } } diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java index 2a5415f45c6d9..e6d23012fae35 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/completion/OpenAiChatCompletionModelTests.java @@ -24,16 +24,16 @@ public class OpenAiChatCompletionModelTests extends ESTestCase { public void testOverrideWith_OverridesUser() { - var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + var model = createCompletionModel("url", "org", "api_key", "model_name", null); var requestTaskSettingsMap = getChatCompletionRequestTaskSettingsMap("user_override"); var overriddenModel = OpenAiChatCompletionModel.of(model, requestTaskSettingsMap); - assertThat(overriddenModel, is(createChatCompletionModel("url", "org", "api_key", "model_name", "user_override"))); + assertThat(overriddenModel, is(createCompletionModel("url", "org", "api_key", "model_name", "user_override"))); } public void testOverrideWith_EmptyMap() { - var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + var model = createCompletionModel("url", "org", "api_key", "model_name", null); var requestTaskSettingsMap = Map.of(); @@ -42,14 +42,14 @@ public void testOverrideWith_EmptyMap() { } public void testOverrideWith_NullMap() { - var model = createChatCompletionModel("url", "org", "api_key", "model_name", null); + var model = createCompletionModel("url", "org", "api_key", "model_name", null); var overriddenModel = OpenAiChatCompletionModel.of(model, (Map) null); assertThat(overriddenModel, sameInstance(model)); } public void 
testOverrideWith_UnifiedCompletionRequest_OverridesModelId() { - var model = createChatCompletionModel("url", "org", "api_key", "model_name", "user"); + var model = createCompletionModel("url", "org", "api_key", "model_name", "user"); var request = new UnifiedCompletionRequest( List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "role", null, null)), "different_model", @@ -63,12 +63,12 @@ public void testOverrideWith_UnifiedCompletionRequest_OverridesModelId() { assertThat( OpenAiChatCompletionModel.of(model, request), - is(createChatCompletionModel("url", "org", "api_key", "different_model", "user")) + is(createCompletionModel("url", "org", "api_key", "different_model", "user")) ); } public void testOverrideWith_UnifiedCompletionRequest_UsesModelFields_WhenRequestDoesNotOverride() { - var model = createChatCompletionModel("url", "org", "api_key", "model_name", "user"); + var model = createCompletionModel("url", "org", "api_key", "model_name", "user"); var request = new UnifiedCompletionRequest( List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("hello"), "role", null, null)), null, // not overriding model @@ -80,10 +80,17 @@ public void testOverrideWith_UnifiedCompletionRequest_UsesModelFields_WhenReques null ); - assertThat( - OpenAiChatCompletionModel.of(model, request), - is(createChatCompletionModel("url", "org", "api_key", "model_name", "user")) - ); + assertThat(OpenAiChatCompletionModel.of(model, request), is(createCompletionModel("url", "org", "api_key", "model_name", "user"))); + } + + public static OpenAiChatCompletionModel createCompletionModel( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String user + ) { + return createModelWithTaskType(url, org, apiKey, modelName, user, TaskType.COMPLETION); } public static OpenAiChatCompletionModel createChatCompletionModel( @@ -92,10 +99,21 @@ public static OpenAiChatCompletionModel 
createChatCompletionModel( String apiKey, String modelName, @Nullable String user + ) { + return createModelWithTaskType(url, org, apiKey, modelName, user, TaskType.CHAT_COMPLETION); + } + + public static OpenAiChatCompletionModel createModelWithTaskType( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String user, + TaskType taskType ) { return new OpenAiChatCompletionModel( "id", - TaskType.COMPLETION, + taskType, "service", new OpenAiChatCompletionServiceSettings(modelName, url, org, null, null), new OpenAiChatCompletionTaskSettings(user), diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java index 0153113be75d9..a854bbdec507a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/ModelValidatorBuilderTests.java @@ -35,6 +35,8 @@ private Map> taskTypeToModelValidatorC SimpleModelValidator.class, TaskType.COMPLETION, ChatCompletionModelValidator.class, + TaskType.CHAT_COMPLETION, + ChatCompletionModelValidator.class, TaskType.ANY, SimpleModelValidator.class ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidatorTests.java new file mode 100644 index 0000000000000..f02c4662d49e4 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/validation/SimpleChatCompletionServiceIntegrationValidatorTests.java @@ -0,0 +1,150 @@ +/* + * Copyright Elasticsearch 
B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.inference.services.validation; + +import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.inference.InferenceService; +import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; +import org.elasticsearch.inference.UnifiedCompletionRequest; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.inference.action.InferenceAction; +import org.junit.Before; +import org.mockito.ArgumentCaptor; +import org.mockito.Mock; + +import java.util.List; + +import static org.hamcrest.Matchers.is; +import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doThrow; +import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; +import static org.mockito.Mockito.when; +import static org.mockito.MockitoAnnotations.openMocks; + +public class SimpleChatCompletionServiceIntegrationValidatorTests extends ESTestCase { + + private static final UnifiedCompletionRequest EXPECTED_REQUEST = new UnifiedCompletionRequest( + List.of(new UnifiedCompletionRequest.Message(new UnifiedCompletionRequest.ContentString("how big"), "user", null, null)), + null, + null, + null, + null, + null, + null, + null + ); + + @Mock + private InferenceService mockInferenceService; + @Mock + private Model mockModel; + @Mock + private ActionListener mockActionListener; + @Mock + private InferenceServiceResults mockInferenceServiceResults; + + private SimpleChatCompletionServiceIntegrationValidator underTest; + + @Before + 
public void setup() { + openMocks(this); + + underTest = new SimpleChatCompletionServiceIntegrationValidator(); + + when(mockActionListener.delegateFailureAndWrap(any())).thenCallRealMethod(); + } + + public void testValidate_ServiceThrowsException() { + doThrow(ElasticsearchStatusException.class).when(mockInferenceService) + .unifiedCompletionInfer(eq(mockModel), eq(EXPECTED_REQUEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), any()); + + assertThrows(ElasticsearchStatusException.class, () -> underTest.validate(mockInferenceService, mockModel, mockActionListener)); + + verifyCallToService(); + } + + public void testValidate_SuccessfulCallToService() { + mockSuccessfulCallToService(mockInferenceServiceResults); + verify(mockActionListener).onResponse(mockInferenceServiceResults); + verifyCallToService(); + } + + public void testValidate_CallsListenerOnFailure_WhenServiceResponseIsNull() { + mockNullResponseFromService(); + + var captor = ArgumentCaptor.forClass(ElasticsearchStatusException.class); + verify(mockActionListener).onFailure(captor.capture()); + + assertThat( + captor.getValue().getMessage(), + is("Could not complete inference endpoint creation as validation call to service returned null response.") + ); + assertThat(captor.getValue().status(), is(RestStatus.BAD_REQUEST)); + + verifyCallToService(); + } + + public void testValidate_CallsListenerOnFailure_WhenServiceThrowsException() { + var returnedException = new IllegalStateException("bad state"); + mockFailureResponseFromService(returnedException); + + var captor = ArgumentCaptor.forClass(ElasticsearchStatusException.class); + verify(mockActionListener).onFailure(captor.capture()); + + assertThat( + captor.getValue().getMessage(), + is("Could not complete inference endpoint creation as validation call to service threw an exception.") + ); + assertThat(captor.getValue().status(), is(RestStatus.BAD_REQUEST)); + assertThat(captor.getValue().getCause(), is(returnedException)); + + verifyCallToService(); 
+ } + + private void mockSuccessfulCallToService(InferenceServiceResults result) { + doAnswer(ans -> { + ActionListener responseListener = ans.getArgument(3); + responseListener.onResponse(result); + return null; + }).when(mockInferenceService) + .unifiedCompletionInfer(eq(mockModel), eq(EXPECTED_REQUEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), any()); + + underTest.validate(mockInferenceService, mockModel, mockActionListener); + } + + private void mockNullResponseFromService() { + mockSuccessfulCallToService(null); + } + + private void mockFailureResponseFromService(Exception exception) { + doAnswer(ans -> { + ActionListener responseListener = ans.getArgument(3); + responseListener.onFailure(exception); + return null; + }).when(mockInferenceService) + .unifiedCompletionInfer(eq(mockModel), eq(EXPECTED_REQUEST), eq(InferenceAction.Request.DEFAULT_TIMEOUT), any()); + + underTest.validate(mockInferenceService, mockModel, mockActionListener); + } + + private void verifyCallToService() { + verify(mockInferenceService).unifiedCompletionInfer( + eq(mockModel), + eq(EXPECTED_REQUEST), + eq(InferenceAction.Request.DEFAULT_TIMEOUT), + any() + ); + verifyNoMoreInteractions(mockInferenceService, mockModel, mockActionListener, mockInferenceServiceResults); + } +} diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java index 99acbec04551e..b5a3ff482c3cf 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/LogsIndexModeCustomSettingsIT.java @@ -122,6 +122,14 @@ public void testConfigureStoredSourceBeforeIndexCreation() throws IOException { var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); String sourceMode = 
(String) subObject("_source").apply(mapping).get("mode"); assertThat(sourceMode, equalTo("stored")); + + request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") + ); } public void testConfigureDisabledSourceBeforeIndexCreation() { @@ -196,6 +204,14 @@ public void testConfigureStoredSourceWhenIndexIsCreated() throws IOException { var mapping = getMapping(client, getDataStreamBackingIndex(client, "logs-custom-dev", 0)); String sourceMode = (String) subObject("_source").apply(mapping).get("mode"); assertThat(sourceMode, equalTo("stored")); + + request = new Request("GET", "/_migration/deprecations"); + var nodeSettings = (Map) ((List) entityAsMap(client.performRequest(request)).get("node_settings")).getFirst(); + assertThat(nodeSettings.get("message"), equalTo(SourceFieldMapper.DEPRECATION_WARNING)); + assertThat( + (String) nodeSettings.get("details"), + containsString(SourceFieldMapper.DEPRECATION_WARNING + " Affected component templates: [logs@custom]") + ); } public void testConfigureDisabledSourceWhenIndexIsCreated() throws IOException { diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java index e411f2f3f314d..1e4c28e72aaeb 100644 --- a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/qa/StandardVersusLogsIndexModeChallengeRestIT.java @@ -181,7 
+181,7 @@ protected static void waitForLogs(RestClient client) throws Exception { } public void testMatchAllQuery() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -199,7 +199,7 @@ public void testMatchAllQuery() throws IOException { } public void testTermsQuery() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -217,7 +217,7 @@ public void testTermsQuery() throws IOException { } public void testHistogramAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -235,7 +235,7 @@ public void testHistogramAggregation() throws IOException { } public void testTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -253,7 +253,7 @@ public void testTermsAggregation() throws IOException { } public void testDateHistogramAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -271,7 +271,7 @@ public void testDateHistogramAggregation() throws IOException { } public void testEsqlSource() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = 
ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -287,7 +287,7 @@ public void testEsqlSource() throws IOException { } public void testEsqlTermsAggregation() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); @@ -302,7 +302,7 @@ public void testEsqlTermsAggregation() throws IOException { } public void testEsqlTermsAggregationByMethod() throws IOException { - int numberOfDocuments = ESTestCase.randomIntBetween(20, 100); + int numberOfDocuments = ESTestCase.randomIntBetween(20, 80); final List documents = generateDocuments(numberOfDocuments); indexDocuments(documents); diff --git a/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java new file mode 100644 index 0000000000000..a1fa73768a1d3 --- /dev/null +++ b/x-pack/plugin/logsdb/src/javaRestTest/java/org/elasticsearch/xpack/logsdb/seqno/RetentionLeaseRestIT.java @@ -0,0 +1,368 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.logsdb.seqno; + +import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.elasticsearch.test.rest.ESRestTestCase; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.ClassRule; + +import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.format.DateTimeFormatter; +import java.util.List; +import java.util.Locale; +import java.util.Map; + +public class RetentionLeaseRestIT extends ESRestTestCase { + private static final String ADD_RETENTION_LEASE_ENDPOINT = "/%s/seq_no/add_retention_lease"; + private static final String BULK_INDEX_ENDPOINT = "/%s/_bulk"; + private static final String[] DOCUMENT_NAMES = { "alpha", "beta", "gamma", "delta" }; + + @ClassRule + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.security.enabled", "false") + .setting("xpack.license.self_generated.type", "trial") + .build(); + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } + + public void testAddRetentionLeaseSuccessfully() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + assertTrue(indexExists(indexName)); + + 
assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); + + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); + final String retentionLeaseId = randomAlphaOfLength(6); + final String retentionLeaseSource = randomAlphaOfLength(8); + retentionLeaseRequest.addParameter("id", retentionLeaseId); + retentionLeaseRequest.addParameter("source", retentionLeaseSource); + + final Response response = client().performRequest(retentionLeaseRequest); + assertOK(response); + + assertRetentionLeaseResponseContent(response, indexName, indexName, retentionLeaseId, retentionLeaseSource); + assertRetentionLeaseExists(indexName, retentionLeaseId, retentionLeaseSource); + } + + public void testAddRetentionLeaseWithoutIdAndSource() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + assertTrue(indexExists(indexName)); + + assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); + + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); + + final Response response = client().performRequest(retentionLeaseRequest); + assertOK(response); + + assertRetentionLeaseResponseContent(response, indexName, indexName, null, null); + } + + public void testAddRetentionLeaseToDataStream() throws IOException { + final String templateName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); + assertOK(createIndexTemplate(templateName, """ + { + "index_patterns": [ "test-*-*" ], + "data_stream": {}, + "priority": 100, + "template": { + "settings": { + "number_of_shards": 1, + "number_of_replicas": 0 + }, + "mappings": { + "properties": { + "@timestamp": { + "type": "date" + }, + "name": { + "type": "keyword" + } + } + } + } + } + """)); + + 
final String dataStreamName = "test-" + + randomAlphanumericOfLength(5).toLowerCase(Locale.ROOT) + + "-" + + randomAlphaOfLength(5).toLowerCase(Locale.ROOT); + assertOK(createDataStream(dataStreamName)); + assertOK(bulkIndex(dataStreamName, randomIntBetween(10, 20))); + + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, dataStreamName)); + final String retentionLeaseId = randomAlphaOfLength(6); + final String retentionLeaseSource = randomAlphaOfLength(8); + retentionLeaseRequest.addParameter("id", retentionLeaseId); + retentionLeaseRequest.addParameter("source", retentionLeaseSource); + + final Response response = client().performRequest(retentionLeaseRequest); + assertOK(response); + + final String dataStreamBackingIndex = getFirstBackingIndex(dataStreamName); + assertRetentionLeaseResponseContent(response, dataStreamName, dataStreamBackingIndex, retentionLeaseId, retentionLeaseSource); + assertRetentionLeaseExists(dataStreamBackingIndex, retentionLeaseId, retentionLeaseSource); + } + + public void testAddRetentionLeaseUsingAlias() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + assertTrue(indexExists(indexName)); + + final String aliasName = randomAlphanumericOfLength(8).toLowerCase(Locale.ROOT); + final Request putAliasRequest = new Request("PUT", "/" + indexName + "/_alias/" + aliasName); + assertOK(client().performRequest(putAliasRequest)); + + assertOK(bulkIndex(aliasName, randomIntBetween(10, 20))); + + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, aliasName)); + final String retentionLeaseId = randomAlphaOfLength(6); + final String retentionLeaseSource = randomAlphaOfLength(8); + 
retentionLeaseRequest.addParameter("id", retentionLeaseId); + retentionLeaseRequest.addParameter("source", retentionLeaseSource); + + final Response response = client().performRequest(retentionLeaseRequest); + assertOK(response); + + assertRetentionLeaseResponseContent(response, aliasName, indexName, retentionLeaseId, retentionLeaseSource); + assertRetentionLeaseExists(indexName, retentionLeaseId, retentionLeaseSource); + } + + public void testAddRetentionLeaseMissingIndex() throws IOException { + final String missingIndexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + assertFalse(indexExists(missingIndexName)); + + final Request retentionLeaseRequest = new Request( + "PUT", + String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, missingIndexName) + ); + final ResponseException exception = assertThrows(ResponseException.class, () -> client().performRequest(retentionLeaseRequest)); + assertResponseException(exception, RestStatus.BAD_REQUEST, "Error adding retention lease for [" + missingIndexName + "]"); + } + + public void testAddRetentionLeaseInvalidParameters() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + assertTrue(indexExists(indexName)); + assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); + + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); + retentionLeaseRequest.addParameter("id", null); + retentionLeaseRequest.addParameter("source", randomBoolean() ? 
UUIDs.randomBase64UUID() : "test-source"); + + final ResponseException exception = assertThrows(ResponseException.class, () -> client().performRequest(retentionLeaseRequest)); + assertResponseException(exception, RestStatus.BAD_REQUEST, "retention lease ID can not be empty"); + } + + public void testAddMultipleRetentionLeasesForSameShard() throws IOException { + final String indexName = randomAlphanumericOfLength(10).toLowerCase(Locale.ROOT); + createIndex( + indexName, + Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 1).put(IndexMetadata.SETTING_NUMBER_OF_REPLICAS, 0).build() + ); + assertTrue(indexExists(indexName)); + assertOK(bulkIndex(indexName, randomIntBetween(10, 20))); + + int numberOfLeases = randomIntBetween(2, 5); + for (int i = 0; i < numberOfLeases; i++) { + final Request retentionLeaseRequest = new Request("PUT", String.format(Locale.ROOT, ADD_RETENTION_LEASE_ENDPOINT, indexName)); + retentionLeaseRequest.addParameter("id", "lease-" + i); + retentionLeaseRequest.addParameter("source", "test-source-" + i); + + final Response response = client().performRequest(retentionLeaseRequest); + assertOK(response); + + assertRetentionLeaseResponseContent(response, indexName, indexName, "lease-" + i, "test-source-" + i); + } + + for (int i = 0; i < numberOfLeases; i++) { + assertRetentionLeaseExists(indexName, "lease-" + i, "test-source-" + i); + } + } + + private static Response bulkIndex(final String indexName, int numberOfDocuments) throws IOException { + final StringBuilder sb = new StringBuilder(); + long timestamp = System.currentTimeMillis(); + + for (int i = 0; i < numberOfDocuments; i++) { + sb.append( + String.format( + Locale.ROOT, + "{ \"index\": {} }\n{ \"@timestamp\": \"%s\", \"name\": \"%s\" }\n", + Instant.ofEpochMilli(timestamp).atOffset(ZoneOffset.UTC).format(DateTimeFormatter.ISO_OFFSET_DATE_TIME), + randomFrom(DOCUMENT_NAMES) + ) + ); + timestamp += 1000; + } + + final Request request = new Request("POST", 
String.format(Locale.ROOT, BULK_INDEX_ENDPOINT, indexName)); + request.setJsonEntity(sb.toString()); + request.addParameter("refresh", "true"); + return client().performRequest(request); + } + + private void assertResponseException(final ResponseException exception, final RestStatus expectedStatus, final String expectedMessage) { + assertEquals(expectedStatus.getStatus(), exception.getResponse().getStatusLine().getStatusCode()); + assertTrue(exception.getMessage().contains(expectedMessage)); + } + + private Map getRetentionLeases(final String indexName) throws IOException { + final Request statsRequest = new Request("GET", "/" + indexName + "/_stats"); + statsRequest.addParameter("level", "shards"); + + final Response response = client().performRequest(statsRequest); + assertOK(response); + + final Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(response.getEntity()), + false + ); + + @SuppressWarnings("unchecked") + final Map indices = (Map) responseMap.get("indices"); + if (indices == null || indices.containsKey(indexName) == false) { + throw new IllegalArgumentException("No shard stats found for: " + indexName); + } + + @SuppressWarnings("unchecked") + final Map shards = (Map) ((Map) indices.get(indexName)).get("shards"); + + @SuppressWarnings("unchecked") + final List> shardList = (List>) shards.get("0"); + + return getRetentionLeases(indexName, shardList); + } + + private static Map getRetentionLeases(final String indexName, final List> shardList) { + final Map shardStats = shardList.getFirst(); + + @SuppressWarnings("unchecked") + final Map retentionLeases = (Map) shardStats.get("retention_leases"); + if (retentionLeases == null) { + throw new IllegalArgumentException("No retention leases found for shard 0 of index: " + indexName); + } + return retentionLeases; + } + + private void assertRetentionLeaseExists( + final String indexAbstractionName, + final String expectedRetentionLeaseId, + final String 
expectedRetentionLeaseSource + ) throws IOException { + final Map retentionLeases = getRetentionLeases(indexAbstractionName); + + @SuppressWarnings("unchecked") + final List> leases = (List>) retentionLeases.get("leases"); + + boolean retentionLeaseExists = leases.stream().anyMatch(lease -> { + final String id = (String) lease.get("id"); + final String source = (String) lease.get("source"); + return expectedRetentionLeaseId.equals(id) && expectedRetentionLeaseSource.equals(source); + }); + + assertTrue( + "Retention lease with ID [" + expectedRetentionLeaseId + "] and source [" + expectedRetentionLeaseSource + "] does not exist.", + retentionLeaseExists + ); + } + + private Response createDataStream(final String dataStreamName) throws IOException { + return client().performRequest(new Request("PUT", "/_data_stream/" + dataStreamName)); + } + + private String getFirstBackingIndex(final String dataStreamName) throws IOException { + final Response response = client().performRequest(new Request("GET", "/_data_stream/" + dataStreamName)); + + final Map responseMap = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(response.getEntity()), + false + ); + + @SuppressWarnings("unchecked") + final List> dataStreams = (List>) responseMap.get("data_streams"); + + if (dataStreams == null || dataStreams.isEmpty()) { + throw new IllegalArgumentException("No data stream found for name: " + dataStreamName); + } + + @SuppressWarnings("unchecked") + final List> backingIndices = (List>) dataStreams.get(0).get("indices"); + + if (backingIndices == null || backingIndices.isEmpty()) { + throw new IllegalArgumentException("No backing indices found for data stream: " + dataStreamName); + } + + return (String) backingIndices.getFirst().get("index_name"); + } + + private static Response createIndexTemplate(final String templateName, final String mappings) throws IOException { + final Request request = new Request("PUT", "/_index_template/" + templateName); + 
request.setJsonEntity(mappings); + return client().performRequest(request); + } + + private void assertRetentionLeaseResponseContent( + final Response response, + final String expectedIndexAbstraction, + final String expectedConcreteIndex, + final String expectedLeaseId, + final String expectedLeaseSource + ) throws IOException { + final Map responseBody = XContentHelper.convertToMap( + JsonXContent.jsonXContent, + EntityUtils.toString(response.getEntity()), + false + ); + + assertEquals("Unexpected index abstraction in response", expectedIndexAbstraction, responseBody.get("index_abstraction")); + assertEquals("Unexpected concrete index in response", expectedConcreteIndex, responseBody.get("index")); + assertNotNull("Shard ID missing in response", responseBody.get("shard_id")); + + if (expectedLeaseId != null) { + assertEquals("Unexpected lease ID in response", expectedLeaseId, responseBody.get("id")); + } + if (expectedLeaseSource != null) { + assertEquals("Unexpected lease source in response", expectedLeaseSource, responseBody.get("source")); + } + } +} diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java index 6c18626edfb7a..c2515039ed8bf 100644 --- a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/LogsDBPlugin.java @@ -7,24 +7,38 @@ package org.elasticsearch.xpack.logsdb; +import org.elasticsearch.Build; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodes; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.ClusterSettings; +import 
org.elasticsearch.common.settings.IndexScopedSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.features.NodeFeature; import org.elasticsearch.index.IndexSettingProvider; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.ActionPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestHandler; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.action.XPackInfoFeatureAction; import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; +import org.elasticsearch.xpack.logsdb.seqno.RestAddRetentionLeaseAction; import java.util.ArrayList; import java.util.Collection; +import java.util.Collections; import java.util.List; +import java.util.function.Predicate; +import java.util.function.Supplier; import static org.elasticsearch.xpack.logsdb.SyntheticSourceLicenseService.FALLBACK_SETTING; @@ -92,6 +106,24 @@ public List> getSettings() { return actions; } + @Override + public Collection getRestHandlers( + Settings settings, + NamedWriteableRegistry namedWriteableRegistry, + RestController restController, + ClusterSettings clusterSettings, + IndexScopedSettings indexScopedSettings, + SettingsFilter settingsFilter, + IndexNameExpressionResolver indexNameExpressionResolver, + Supplier nodesInCluster, + Predicate clusterSupportsFeature + ) { + if (Build.current().isSnapshot()) { + return List.of(new RestAddRetentionLeaseAction()); + } + return Collections.emptyList(); + } + protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } diff --git a/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java 
b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java new file mode 100644 index 0000000000000..1370b18a431bc --- /dev/null +++ b/x-pack/plugin/logsdb/src/main/java/org/elasticsearch/xpack/logsdb/seqno/RestAddRetentionLeaseAction.java @@ -0,0 +1,278 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.logsdb.seqno; + +import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.admin.indices.stats.ShardStats; +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.common.UUIDs; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.seqno.RetentionLeaseActions; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestChannel; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.RestResponse; +import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.rest.action.RestActionListener; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Comparator; +import java.util.List; +import java.util.Locale; +import java.util.concurrent.CompletableFuture; +import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeUnit; +import java.util.function.Supplier; + +import static org.elasticsearch.rest.RestRequest.Method.PUT; + +/** + * This class implements a REST API for adding 
a retention lease to a shard in Elasticsearch. + * Retention leases ensure that specific sequence numbers are retained, even as the global checkpoint + * advances during indexing. This guarantees seq_no values availability until the retention lease is + * removed. + * + * The API supports adding retention leases to indices, data streams, and index aliases. For data streams + * or aliases, the first backing index or underlying index is identified, and the retention lease is added + * to its shard. + * + * **Note:** This REST API is available only in Elasticsearch snapshot builds and is intended solely + * for benchmarking purposes, such as benchmarking operations like the shard changes API in Rally tracks. + * It is not intended for use in production environments. + * + * The response provides details about the added retention lease, including the target index, + * shard ID, retention lease ID, and source. + */ +public class RestAddRetentionLeaseAction extends BaseRestHandler { + + private static final int DEFAULT_TIMEOUT_SECONDS = 60; + private static final TimeValue SHARD_STATS_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); + private static final TimeValue GET_INDEX_TIMEOUT = new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS); + private static final String INDEX_PARAM = "index"; + private static final String ID_PARAM = "id"; + private static final String SOURCE_PARAM = "source"; + + @Override + public String getName() { + return "add_retention_lease_action"; + } + + @Override + public List routes() { + return List.of(new Route(PUT, "/{index}/seq_no/add_retention_lease")); + } + + /** + * Prepare a request to add a retention lease. When the target is an alias or data stream we just + * get the first shard of the first index using the shard stats api. + * + * @param request the request to execute + * @param client The NodeClient for executing the request. + * @return A RestChannelConsumer for handling the request. 
+ * @throws IOException If an error occurs while preparing the request. + */ + @Override + protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { + final String indexAbstractionName = request.param(INDEX_PARAM); + final String retentionLeaseId = request.param(ID_PARAM, UUIDs.randomBase64UUID()); + final String retentionLeaseSource = request.param(SOURCE_PARAM, UUIDs.randomBase64UUID()); + + return channel -> asyncGetIndexName(client, indexAbstractionName, client.threadPool().executor(ThreadPool.Names.GENERIC)) + .thenCompose( + concreteIndexName -> asyncShardStats(client, concreteIndexName, client.threadPool().executor(ThreadPool.Names.GENERIC)) + .thenCompose( + shardStats -> addRetentionLease( + channel, + client, + indexAbstractionName, + concreteIndexName, + shardStats, + retentionLeaseId, + retentionLeaseSource + ) + ) + ) + .exceptionally(ex -> { + final String message = ex.getCause() != null ? ex.getCause().getMessage() : ex.getMessage(); + channel.sendResponse( + new RestResponse(RestStatus.BAD_REQUEST, "Error adding retention lease for [" + indexAbstractionName + "]: " + message) + ); + return null; + }); + } + + private static XContentBuilder addRetentionLeaseResponseToXContent( + final String indexAbstractionName, + final String concreteIndexName, + final ShardId shardId, + final String retentionLeaseId, + final String retentionLeaseSource + ) { + try (XContentBuilder builder = XContentFactory.jsonBuilder()) { + builder.startObject(); + builder.field("index_abstraction", indexAbstractionName); + builder.field("index", concreteIndexName); + builder.field("shard_id", shardId); + builder.field("id", retentionLeaseId); + builder.field("source", retentionLeaseSource); + builder.endObject(); + + return builder; + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + /** + * Adds a retention lease to a specific shard in an index, data stream, or alias. 
+ * This operation is asynchronous and sends the response back to the client through the provided {@link RestChannel}. + * + * @param channel The {@link RestChannel} used to send the response back to the client. + * @param client The {@link NodeClient} used to execute the retention lease addition request. + * @param indexAbstractionName The name of the index, data stream, or alias for which the retention lease is being added. + * @param shardStats The {@link ShardStats} of the target shard where the retention lease will be added. + * @param retentionLeaseId A unique identifier for the retention lease being added. This identifies the lease in future operations. + * @param retentionLeaseSource A description or source of the retention lease request, often used for auditing or tracing purposes. + * @return A {@link CompletableFuture} that completes when the operation finishes. If the operation succeeds, the future completes + * successfully with {@code null}. If an error occurs, the future completes exceptionally with the corresponding exception. + * @throws ElasticsearchException If the request fails or encounters an unexpected error. 
+ */ + private CompletableFuture addRetentionLease( + final RestChannel channel, + final NodeClient client, + final String indexAbstractionName, + final String concreteIndexName, + final ShardStats shardStats, + final String retentionLeaseId, + final String retentionLeaseSource + ) { + final CompletableFuture completableFuture = new CompletableFuture<>(); + try { + final ShardId shardId = shardStats.getShardRouting().shardId(); + final RetentionLeaseActions.AddRequest addRetentionLeaseRequest = new RetentionLeaseActions.AddRequest( + shardId, + retentionLeaseId, + RetentionLeaseActions.RETAIN_ALL, + retentionLeaseSource + ); + + client.execute(RetentionLeaseActions.ADD, addRetentionLeaseRequest, new RestActionListener<>(channel) { + + @Override + protected void processResponse(final ActionResponse.Empty empty) { + completableFuture.complete(null); + channel.sendResponse( + new RestResponse( + RestStatus.OK, + addRetentionLeaseResponseToXContent( + indexAbstractionName, + concreteIndexName, + shardId, + retentionLeaseId, + retentionLeaseSource + ) + ) + ); + } + }); + } catch (Exception e) { + completableFuture.completeExceptionally( + new ElasticsearchException("Failed to add retention lease for [" + indexAbstractionName + "]", e) + ); + } + return completableFuture; + } + + /** + * Execute an asynchronous task using a task supplier and an executor service. + * + * @param The type of data to be retrieved. + * @param task The supplier task that provides the data. + * @param executorService The {@link ExecutorService} for executing the asynchronous task. + * @param errorMessage The error message to be thrown if the task execution fails. + * @return A {@link CompletableFuture} that completes with the retrieved data. 
+ */ + private static CompletableFuture supplyAsyncTask( + final Supplier task, + final ExecutorService executorService, + final String errorMessage + ) { + return CompletableFuture.supplyAsync(() -> { + try { + return task.get(); + } catch (Exception e) { + throw new ElasticsearchException(errorMessage, e); + } + }, executorService); + } + + /** + * Asynchronously retrieves the index name for a given index, alias or data stream. + * If the name represents a data stream, the name of the first backing index is returned. + * If the name represents an alias, the name of the first index that the alias points to is returned. + * + * @param client The {@link NodeClient} for executing the asynchronous request. + * @param indexAbstractionName The name of the index, alias or data stream. + * @return A {@link CompletableFuture} that completes with the retrieved index name. + */ + private static CompletableFuture asyncGetIndexName( + final NodeClient client, + final String indexAbstractionName, + final ExecutorService executorService + ) { + return supplyAsyncTask(() -> { + final ClusterState clusterState = client.admin() + .cluster() + .prepareState(new TimeValue(DEFAULT_TIMEOUT_SECONDS, TimeUnit.SECONDS)) + .get(GET_INDEX_TIMEOUT) + .getState(); + final IndexAbstraction indexAbstraction = clusterState.metadata().getIndicesLookup().get(indexAbstractionName); + if (indexAbstraction == null) { + throw new IllegalArgumentException( + String.format(Locale.ROOT, "Invalid index or data stream name [%s]", indexAbstractionName) + ); + } + if (indexAbstraction.getType() == IndexAbstraction.Type.DATA_STREAM + || indexAbstraction.getType() == IndexAbstraction.Type.ALIAS) { + return indexAbstraction.getIndices().getFirst().getName(); + } + return indexAbstractionName; + }, executorService, "Error while retrieving index name for index or data stream [" + indexAbstractionName + "]"); + } + + /** + * Asynchronously retrieves the shard stats for a given index using an executor service. 
+ * + * @param client The {@link NodeClient} for executing the asynchronous request. + * @param concreteIndexName The name of the index for which to retrieve shard statistics. + * @param executorService The {@link ExecutorService} for executing the asynchronous task. + * @return A {@link CompletableFuture} that completes with the retrieved ShardStats. + * @throws ElasticsearchException If an error occurs while retrieving shard statistics. + */ + private static CompletableFuture asyncShardStats( + final NodeClient client, + final String concreteIndexName, + final ExecutorService executorService + ) { + return supplyAsyncTask( + () -> Arrays.stream(client.admin().indices().prepareStats(concreteIndexName).clear().get(SHARD_STATS_TIMEOUT).getShards()) + .max(Comparator.comparingLong(shardStats -> shardStats.getCommitStats().getGeneration())) + .orElseThrow(() -> new ElasticsearchException("Unable to retrieve shard stats for: " + concreteIndexName)), + executorService, + "Error while retrieving shard stats for [" + concreteIndexName + "]" + ); + } +} diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index 3a50cc8143485..1a765ca06efbc 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -13,11 +13,13 @@ import org.apache.lucene.index.DocValues; import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.LeafReader; import org.apache.lucene.index.LeafReaderContext; import org.apache.lucene.index.SortedSetDocValues; import org.apache.lucene.index.TermsEnum; import org.apache.lucene.search.SortField; 
import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.ByteArrayStreamInput; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.util.BigArrays; @@ -35,6 +37,7 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.Mapper; import org.elasticsearch.index.mapper.MapperBuilderContext; +import org.elasticsearch.index.mapper.SourceLoader; import org.elasticsearch.index.mapper.SourceValueFetcher; import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.mapper.TextSearchInfo; @@ -46,6 +49,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.sort.BucketedSort; import org.elasticsearch.search.sort.SortOrder; +import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import java.io.IOException; @@ -72,7 +76,8 @@ * 2 for each key (one per document), a counted_terms aggregation on a counted_keyword field will consider * the actual count and report a count of 3 for each key.

    * - *

    Only regular source is supported; synthetic source won't work.

    + *

    Synthetic source is supported, but uses the fallback "ignore source" infrastructure unless the source_keep_mode is + * explicitly set to none in the field mapping parameters.

    */ public class CountedKeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "counted_keyword"; @@ -306,6 +311,81 @@ public FieldMapper build(MapperBuilderContext context) { } } + private static class CountedKeywordFieldSyntheticSourceLoader extends SourceLoader.DocValuesBasedSyntheticFieldLoader { + private final String keywordsFieldName; + private final String countsFieldName; + private final String leafName; + + private SortedSetDocValues keywordsReader; + private BinaryDocValues countsReader; + private boolean hasValue; + + CountedKeywordFieldSyntheticSourceLoader(String keywordsFieldName, String countsFieldName, String leafName) { + this.keywordsFieldName = keywordsFieldName; + this.countsFieldName = countsFieldName; + this.leafName = leafName; + } + + @Override + public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf) throws IOException { + keywordsReader = leafReader.getSortedSetDocValues(keywordsFieldName); + countsReader = leafReader.getBinaryDocValues(countsFieldName); + + if (keywordsReader == null || countsReader == null) { + return null; + } + + return docId -> { + hasValue = keywordsReader.advanceExact(docId); + if (hasValue == false) { + return false; + } + + boolean countsHasValue = countsReader.advanceExact(docId); + assert countsHasValue; + + return true; + }; + } + + @Override + public boolean hasValue() { + return hasValue; + } + + @Override + public void write(XContentBuilder b) throws IOException { + if (hasValue == false) { + return; + } + + int[] counts = new BytesArray(countsReader.binaryValue()).streamInput().readVIntArray(); + boolean singleValue = counts.length == 1 && counts[0] == 1; + + if (singleValue) { + b.field(leafName); + } else { + b.startArray(leafName); + } + + for (int i = 0; i < keywordsReader.docValueCount(); i++) { + BytesRef currKeyword = keywordsReader.lookupOrd(keywordsReader.nextOrd()); + for (int j = 0; j < counts[i]; j++) { + b.utf8Value(currKeyword.bytes, 
currKeyword.offset, currKeyword.length); + } + } + + if (singleValue == false) { + b.endArray(); + } + } + + @Override + public String fieldName() { + return keywordsFieldName; + } + } + public static TypeParser PARSER = new TypeParser((n, c) -> new CountedKeywordFieldMapper.Builder(n)); private final FieldType fieldType; @@ -342,6 +422,11 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } else { throw new IllegalArgumentException("Encountered unexpected token [" + parser.currentToken() + "]."); } + + if (values.isEmpty()) { + return; + } + int i = 0; int[] counts = new int[values.size()]; for (Map.Entry value : values.entrySet()) { @@ -355,13 +440,18 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio private void parseArray(DocumentParserContext context, SortedMap values) throws IOException { XContentParser parser = context.parser(); + int arrDepth = 1; while (true) { XContentParser.Token token = parser.nextToken(); if (token == XContentParser.Token.END_ARRAY) { - return; - } - if (token == XContentParser.Token.VALUE_STRING) { + arrDepth -= 1; + if (arrDepth <= 0) { + return; + } + } else if (token == XContentParser.Token.VALUE_STRING) { parseValue(parser, values); + } else if (token == XContentParser.Token.START_ARRAY) { + arrDepth += 1; } else if (token == XContentParser.Token.VALUE_NULL) { // ignore null values } else { @@ -399,4 +489,16 @@ public FieldMapper.Builder getMergeBuilder() { protected String contentType() { return CONTENT_TYPE; } + + @Override + protected SyntheticSourceSupport syntheticSourceSupport() { + var keepMode = sourceKeepMode(); + if (keepMode.isPresent() == false || keepMode.get() != SourceKeepMode.NONE) { + return super.syntheticSourceSupport(); + } + + var loader = new CountedKeywordFieldSyntheticSourceLoader(fullPath(), countFieldMapper.fullPath(), leafName()); + return new SyntheticSourceSupport.Native(loader); + } + } diff --git 
a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java index 2ffd4468c814a..c99edcf7352fa 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -10,11 +10,15 @@ import org.apache.lucene.index.DocValuesType; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; +import org.elasticsearch.core.CheckedConsumer; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperTestCase; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.search.lookup.SourceFilter; +import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.junit.AssumptionViolatedException; @@ -22,6 +26,9 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.stream.Stream; + +import static org.hamcrest.Matchers.equalTo; public class CountedKeywordFieldMapperTests extends MapperTestCase { @Override @@ -64,9 +71,103 @@ protected Object generateRandomInputValue(MappedFieldType ft) { return randomBoolean() ? 
null : randomAlphaOfLengthBetween(1, 10); } + public void testSyntheticSourceSingleNullValue() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + minimalMapping(b); + b.field("synthetic_source_keep", "none"); + b.endObject(); + })).documentMapper(); + + String expected = "{}"; + CheckedConsumer buildInput = b -> { + b.field("field"); + b.nullValue(); + }; + + assertThat(syntheticSource(mapper, buildInput), equalTo(expected)); + assertThat(syntheticSource(mapper, new SourceFilter(new String[] { "field" }, null), buildInput), equalTo(expected)); + assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}")); + } + + public void testSyntheticSourceManyNullValue() throws IOException { + DocumentMapper mapper = createSytheticSourceMapperService(mapping(b -> { + b.startObject("field"); + minimalMapping(b); + b.field("synthetic_source_keep", "none"); + b.endObject(); + })).documentMapper(); + + int nullCount = randomIntBetween(1, 5); + + String expected = "{}"; + CheckedConsumer buildInput = b -> { + b.startArray("field"); + for (int i = 0; i < nullCount; i++) { + b.nullValue(); + } + b.endArray(); + }; + + assertThat(syntheticSource(mapper, buildInput), equalTo(expected)); + assertThat(syntheticSource(mapper, new SourceFilter(new String[] { "field" }, null), buildInput), equalTo(expected)); + assertThat(syntheticSource(mapper, new SourceFilter(null, new String[] { "field" }), buildInput), equalTo("{}")); + } + + @Override + public void testSyntheticSourceKeepAll() throws IOException { + // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" + } + + @Override + public void testSyntheticSourceKeepArrays() throws IOException { + // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" + } + + @Override + public void 
testSyntheticSourceKeepNone() throws IOException { + // For now, native synthetic source is only supported when "synthetic_source_keep" mapping attribute is "none" + } + @Override protected SyntheticSourceSupport syntheticSourceSupport(boolean ignoreMalformed) { - throw new AssumptionViolatedException("not supported"); + return new SyntheticSourceSupport() { + @Override + public SyntheticSourceExample example(int maxValues) throws IOException { + if (randomBoolean()) { + Tuple v = generateValue(); + return new SyntheticSourceExample(v.v1(), v.v2(), this::mapping); + } + int maxNullValues = 5; + List> values = randomList(1, maxValues, this::generateValue); + List in = Stream.concat(values.stream().map(Tuple::v1), randomList(0, maxNullValues, () -> (String) null).stream()) + .toList(); + + in = shuffledList(in); + + List outList = values.stream().map(Tuple::v2).sorted().toList(); + + Object out = outList.size() == 1 ? outList.get(0) : outList; + return new SyntheticSourceExample(in, out, this::mapping); + } + + private Tuple generateValue() { + String v = ESTestCase.randomAlphaOfLength(5); + return Tuple.tuple(v, v); + } + + private void mapping(XContentBuilder b) throws IOException { + minimalMapping(b); + // For now, synthetic source is only supported when "synthetic_source_keep" is "none". + // Once we implement true synthetic source support, we should remove this. 
+ b.field("synthetic_source_keep", "none"); + } + + @Override + public List invalidExample() throws IOException { + return List.of(); + } + }; } @Override diff --git a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java index b32a6efb854d7..04caf3dbaa9d1 100644 --- a/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java +++ b/x-pack/plugin/migrate/src/internalClusterTest/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportActionIT.java @@ -28,7 +28,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.migrate.MigratePlugin; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamRequest; -import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamResponse; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamEnrichedStatus; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTask; @@ -61,8 +60,7 @@ public void testNonExistentDataStream() { ); assertThrows( ResourceNotFoundException.class, - () -> client().execute(new ActionType(ReindexDataStreamAction.NAME), reindexDataStreamRequest) - .actionGet() + () -> client().execute(new ActionType(ReindexDataStreamAction.NAME), reindexDataStreamRequest).actionGet() ); } @@ -74,12 +72,11 @@ public void testAlreadyUpToDateDataStream() throws Exception { dataStreamName ); final int backingIndexCount = createDataStream(dataStreamName); - ReindexDataStreamResponse response = client().execute( - new ActionType(ReindexDataStreamAction.NAME), + AcknowledgedResponse response = client().execute( + new ActionType(ReindexDataStreamAction.NAME), reindexDataStreamRequest ).actionGet(); - String persistentTaskId = response.getTaskId(); - 
assertThat(persistentTaskId, equalTo("reindex-data-stream-" + dataStreamName)); + String persistentTaskId = "reindex-data-stream-" + dataStreamName; AtomicReference runningTask = new AtomicReference<>(); for (TransportService transportService : internalCluster().getInstances(TransportService.class)) { TaskManager taskManager = transportService.getTaskManager(); diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java index 9e4cbb1082215..17925eb04851b 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamAction.java @@ -9,10 +9,10 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.ActionType; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.util.FeatureFlag; @@ -20,7 +20,6 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; @@ -29,7 +28,7 @@ import java.util.Objects; import java.util.function.Predicate; -public class ReindexDataStreamAction extends ActionType { +public class ReindexDataStreamAction extends ActionType { public static final FeatureFlag REINDEX_DATA_STREAM_FEATURE_FLAG = 
new FeatureFlag("reindex_data_stream"); public static final String TASK_ID_PREFIX = "reindex-data-stream-"; @@ -47,48 +46,6 @@ public enum Mode { UPGRADE } - public static class ReindexDataStreamResponse extends ActionResponse implements ToXContentObject { - private final String taskId; - - public ReindexDataStreamResponse(String taskId) { - super(); - this.taskId = taskId; - } - - public ReindexDataStreamResponse(StreamInput in) throws IOException { - super(in); - this.taskId = in.readString(); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeString(taskId); - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - builder.field("acknowledged", true); - builder.endObject(); - return builder; - } - - public String getTaskId() { - return taskId; - } - - @Override - public int hashCode() { - return Objects.hashCode(taskId); - } - - @Override - public boolean equals(Object other) { - return other instanceof ReindexDataStreamResponse && taskId.equals(((ReindexDataStreamResponse) other).taskId); - } - - } - public static class ReindexDataStreamRequest extends ActionRequest implements IndicesRequest, ToXContent { private final Mode mode; private final String sourceDataStream; diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java index cc648c1984544..2d7c17db054a9 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamTransportAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.service.ClusterService; @@ -22,7 +23,6 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamRequest; -import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamResponse; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTask; import org.elasticsearch.xpack.migrate.task.ReindexDataStreamTaskParams; @@ -33,7 +33,7 @@ * This transport action creates a new persistent task for reindexing the source data stream given in the request. On successful creation * of the persistent task, it responds with the persistent task id so that the user can monitor the persistent task. 
*/ -public class ReindexDataStreamTransportAction extends HandledTransportAction { +public class ReindexDataStreamTransportAction extends HandledTransportAction { private final PersistentTasksService persistentTasksService; private final TransportService transportService; private final ClusterService clusterService; @@ -59,7 +59,7 @@ public ReindexDataStreamTransportAction( } @Override - protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener listener) { + protected void doExecute(Task task, ReindexDataStreamRequest request, ActionListener listener) { String sourceDataStreamName = request.getSourceDataStream(); Metadata metadata = clusterService.state().metadata(); DataStream dataStream = metadata.dataStreams().get(sourceDataStreamName); @@ -82,7 +82,7 @@ protected void doExecute(Task task, ReindexDataStreamRequest request, ActionList ReindexDataStreamTask.TASK_NAME, params, null, - ActionListener.wrap(startedTask -> listener.onResponse(new ReindexDataStreamResponse(persistentTaskId)), listener::onFailure) + ActionListener.wrap(startedTask -> listener.onResponse(AcknowledgedResponse.TRUE), listener::onFailure) ); } diff --git a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java index 19cb439495e9a..a89f056477d2c 100644 --- a/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java +++ b/x-pack/plugin/migrate/src/main/java/org/elasticsearch/xpack/migrate/rest/RestMigrationReindexAction.java @@ -7,6 +7,7 @@ package org.elasticsearch.xpack.migrate.rest; +import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestChannel; @@ -17,7 +18,6 @@ import org.elasticsearch.xcontent.XContentBuilder; 
import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction; -import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamResponse; import java.io.IOException; import java.util.Collections; @@ -63,14 +63,14 @@ public Set supportedCapabilities() { return Collections.unmodifiableSet(capabilities); } - static class ReindexDataStreamRestToXContentListener extends RestBuilderListener { + static class ReindexDataStreamRestToXContentListener extends RestBuilderListener { ReindexDataStreamRestToXContentListener(RestChannel channel) { super(channel); } @Override - public RestResponse buildResponse(ReindexDataStreamResponse response, XContentBuilder builder) throws Exception { + public RestResponse buildResponse(AcknowledgedResponse response, XContentBuilder builder) throws Exception { response.toXContent(builder, channel.request()); return new RestResponse(RestStatus.OK, builder); } diff --git a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamResponseTests.java b/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamResponseTests.java deleted file mode 100644 index d886fc660d7a8..0000000000000 --- a/x-pack/plugin/migrate/src/test/java/org/elasticsearch/xpack/migrate/action/ReindexDataStreamResponseTests.java +++ /dev/null @@ -1,50 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.migrate.action; - -import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.test.AbstractWireSerializingTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.migrate.action.ReindexDataStreamAction.ReindexDataStreamResponse; - -import java.io.IOException; -import java.util.Map; - -import static org.elasticsearch.xcontent.ToXContent.EMPTY_PARAMS; -import static org.hamcrest.Matchers.equalTo; - -public class ReindexDataStreamResponseTests extends AbstractWireSerializingTestCase { - @Override - protected Writeable.Reader instanceReader() { - return ReindexDataStreamResponse::new; - } - - @Override - protected ReindexDataStreamResponse createTestInstance() { - return new ReindexDataStreamResponse(randomAlphaOfLength(40)); - } - - @Override - protected ReindexDataStreamResponse mutateInstance(ReindexDataStreamResponse instance) { - return createTestInstance(); - } - - public void testToXContent() throws IOException { - ReindexDataStreamResponse response = createTestInstance(); - try (XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent)) { - builder.humanReadable(true); - response.toXContent(builder, EMPTY_PARAMS); - try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { - assertThat(parser.map(), equalTo(Map.of("acknowledged", true))); - } - } - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java index 003bef914f72c..cd2ee3e01c6d4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java +++ 
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPreviewDataFrameAnalyticsAction.java @@ -77,10 +77,10 @@ public TransportPreviewDataFrameAnalyticsAction( this.clusterService = clusterService; } - private static Map mergeRow(DataFrameDataExtractor.Row row, List fieldNames) { - return row.getValues() == null + private static Map mergeRow(String[] row, List fieldNames) { + return row == null ? Collections.emptyMap() - : IntStream.range(0, row.getValues().length).boxed().collect(Collectors.toMap(fieldNames::get, i -> row.getValues()[i])); + : IntStream.range(0, row.length).boxed().collect(Collectors.toMap(fieldNames::get, i -> row[i])); } @Override @@ -121,7 +121,7 @@ void preview(Task task, DataFrameAnalyticsConfig config, ActionListener { List fieldNames = extractor.getFieldNames(); - l.onResponse(new Response(rows.stream().map((r) -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); + l.onResponse(new Response(rows.stream().map(r -> mergeRow(r, fieldNames)).collect(Collectors.toList()))); })); })); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java index 315d2249d00cb..ff96c73bc002c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractor.java @@ -19,7 +19,6 @@ import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; -import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.fetch.StoredFieldsContext; import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.xpack.core.ClientHelper; @@ -107,14 +106,14 @@ public void cancel() { isCancelled = true; } - public Optional> 
next() throws IOException { + public Optional next() throws IOException { if (hasNext() == false) { throw new NoSuchElementException(); } - Optional> hits = Optional.ofNullable(nextSearch()); - if (hits.isPresent() && hits.get().isEmpty() == false) { - lastSortKey = hits.get().get(hits.get().size() - 1).getSortKey(); + Optional hits = Optional.ofNullable(nextSearch()); + if (hits.isPresent() && hits.get().length > 0) { + lastSortKey = (long) hits.get()[hits.get().length - 1].getSortValues()[0]; } else { hasNext = false; } @@ -126,7 +125,7 @@ public Optional> next() throws IOException { * Does no sorting of the results. * @param listener To alert with the extracted rows */ - public void preview(ActionListener> listener) { + public void preview(ActionListener> listener) { SearchRequestBuilder searchRequestBuilder = new SearchRequestBuilder(client) // This ensures the search throws if there are failures and the scroll context gets cleared automatically @@ -155,22 +154,24 @@ public void preview(ActionListener> listener) { return; } - List rows = new ArrayList<>(searchResponse.getHits().getHits().length); + List rows = new ArrayList<>(searchResponse.getHits().getHits().length); for (SearchHit hit : searchResponse.getHits().getHits()) { - var unpooled = hit.asUnpooled(); - String[] extractedValues = extractValues(unpooled); - rows.add(extractedValues == null ? 
new Row(null, unpooled, true) : new Row(extractedValues, unpooled, false)); + String[] extractedValues = extractValues(hit); + rows.add(extractedValues); } delegate.onResponse(rows); }) ); } - protected List nextSearch() throws IOException { + protected SearchHit[] nextSearch() throws IOException { + if (isCancelled) { + return null; + } return tryRequestWithSearchResponse(() -> executeSearchRequest(buildSearchRequest())); } - private List tryRequestWithSearchResponse(Supplier request) throws IOException { + private SearchHit[] tryRequestWithSearchResponse(Supplier request) throws IOException { try { // We've set allow_partial_search_results to false which means if something @@ -179,7 +180,7 @@ private List tryRequestWithSearchResponse(Supplier request) try { LOGGER.trace(() -> "[" + context.jobId + "] Search response was obtained"); - List rows = processSearchResponse(searchResponse); + SearchHit[] rows = processSearchResponse(searchResponse); // Request was successfully executed and processed so we can restore the flag to retry if a future failure occurs hasPreviousSearchFailed = false; @@ -246,22 +247,12 @@ private void setFetchSource(SearchRequestBuilder searchRequestBuilder) { } } - private List processSearchResponse(SearchResponse searchResponse) { - if (searchResponse.getHits().getHits().length == 0) { + private SearchHit[] processSearchResponse(SearchResponse searchResponse) { + if (isCancelled || searchResponse.getHits().getHits().length == 0) { hasNext = false; return null; } - - SearchHits hits = searchResponse.getHits(); - List rows = new ArrayList<>(hits.getHits().length); - for (SearchHit hit : hits) { - if (isCancelled) { - hasNext = false; - break; - } - rows.add(createRow(hit)); - } - return rows; + return searchResponse.getHits().asUnpooled().getHits(); } private String extractNonProcessedValues(SearchHit hit, String organicFeature) { @@ -317,14 +308,13 @@ private String[] extractProcessedValue(ProcessedField processedField, SearchHit return 
extractedValue; } - private Row createRow(SearchHit hit) { - var unpooled = hit.asUnpooled(); - String[] extractedValues = extractValues(unpooled); + public Row createRow(SearchHit hit) { + String[] extractedValues = extractValues(hit); if (extractedValues == null) { - return new Row(null, unpooled, true); + return new Row(null, hit, true); } boolean isTraining = trainTestSplitter.get().isTraining(extractedValues); - Row row = new Row(extractedValues, unpooled, isTraining); + Row row = new Row(extractedValues, hit, isTraining); LOGGER.trace( () -> format( "[%s] Extracted row: sort key = [%s], is_training = [%s], values = %s", diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java index d4c10e25a2ade..6205653ce9c0f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/AnalyticsProcessManager.java @@ -16,6 +16,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.search.SearchHit; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.job.messages.Messages; @@ -256,9 +257,14 @@ private static void writeDataRows( long rowsProcessed = 0; while (dataExtractor.hasNext()) { - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); if (rows.isPresent()) { - for (DataFrameDataExtractor.Row row : rows.get()) { + for (SearchHit searchHit : rows.get()) { + if (dataExtractor.isCancelled()) { + break; + } + rowsProcessed++; + DataFrameDataExtractor.Row row = dataExtractor.createRow(searchHit); if 
(row.shouldSkip()) { dataCountsTracker.incrementSkippedDocsCount(); } else { @@ -271,7 +277,6 @@ private static void writeDataRows( } } } - rowsProcessed += rows.get().size(); progressTracker.updateLoadingDataProgress(rowsProcessed >= totalRows ? 100 : (int) (rowsProcessed * 100.0 / totalRows)); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java index ee91b0637bfc7..3e1968ca19ce1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoiner.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.Nullable; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.dataframe.extractor.DataFrameDataExtractor; @@ -22,11 +23,9 @@ import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; import java.io.IOException; -import java.util.Collections; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.LinkedList; -import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -97,6 +96,9 @@ private void addResultAndJoinIfEndOfBatch(RowResults rowResults) { private void joinCurrentResults() { try (LimitAwareBulkIndexer bulkIndexer = new LimitAwareBulkIndexer(settings, this::executeBulkRequest)) { while (currentResults.isEmpty() == false) { + if (dataExtractor.isCancelled()) { + break; + } RowResults result = currentResults.pop(); DataFrameDataExtractor.Row row = dataFrameRowsIterator.next(); checkChecksumsMatch(row, result); @@ -164,12 +166,12 @@ private void 
consumeDataExtractor() throws IOException { private class ResultMatchingDataFrameRows implements Iterator { - private List currentDataFrameRows = Collections.emptyList(); + private SearchHit[] currentDataFrameRows = SearchHits.EMPTY; private int currentDataFrameRowsIndex; @Override public boolean hasNext() { - return dataExtractor.hasNext() || currentDataFrameRowsIndex < currentDataFrameRows.size(); + return dataExtractor.hasNext() || currentDataFrameRowsIndex < currentDataFrameRows.length; } @Override @@ -177,7 +179,7 @@ public DataFrameDataExtractor.Row next() { DataFrameDataExtractor.Row row = null; while (hasNoMatch(row) && hasNext()) { advanceToNextBatchIfNecessary(); - row = currentDataFrameRows.get(currentDataFrameRowsIndex++); + row = dataExtractor.createRow(currentDataFrameRows[currentDataFrameRowsIndex++]); } if (hasNoMatch(row)) { @@ -191,13 +193,13 @@ private static boolean hasNoMatch(DataFrameDataExtractor.Row row) { } private void advanceToNextBatchIfNecessary() { - if (currentDataFrameRowsIndex >= currentDataFrameRows.size()) { - currentDataFrameRows = getNextDataRowsBatch().orElse(Collections.emptyList()); + if (currentDataFrameRowsIndex >= currentDataFrameRows.length) { + currentDataFrameRows = getNextDataRowsBatch().orElse(SearchHits.EMPTY); currentDataFrameRowsIndex = 0; } } - private Optional> getNextDataRowsBatch() { + private Optional getNextDataRowsBatch() { try { return dataExtractor.next(); } catch (IOException e) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java index 993e00bd4adf4..2ba9146533b78 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/extractor/DataFrameDataExtractorTests.java @@ -118,19 +118,19 @@ 
public void testTwoPageExtraction() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(3)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); - assertThat(rows.get().get(1).getValues(), equalTo(new String[] { "12", "22" })); - assertThat(rows.get().get(2).getValues(), equalTo(new String[] { "13", "23" })); + assertThat(rows.get().length, equalTo(3)); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues(), equalTo(new String[] { "12", "22" })); + assertThat(dataExtractor.createRow(rows.get()[2]).getValues(), equalTo(new String[] { "13", "23" })); assertThat(dataExtractor.hasNext(), is(true)); // Second batch rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(1)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "31", "41" })); + assertThat(rows.get().length, equalTo(1)); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "31", "41" })); assertThat(dataExtractor.hasNext(), is(true)); // Third batch should return empty @@ -208,18 +208,18 @@ public void testRecoveryFromErrorOnSearch() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch expected as normally since we'll retry after the error - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(2)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); - assertThat(rows.get().get(1).getValues(), equalTo(new String[] { "12", "22" })); + assertThat(rows.get().length, equalTo(2)); + 
assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues(), equalTo(new String[] { "12", "22" })); assertThat(dataExtractor.hasNext(), is(true)); // We get second batch as we retried after the error rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(1)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "13", "23" })); + assertThat(rows.get().length, equalTo(1)); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "13", "23" })); assertThat(dataExtractor.hasNext(), is(true)); // Next batch should return empty @@ -262,10 +262,10 @@ public void testIncludeSourceIsFalseAndNoSourceFields() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(1)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(rows.get().length, equalTo(1)); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); assertThat(dataExtractor.hasNext(), is(true)); assertThat(dataExtractor.next(), isEmpty()); @@ -297,10 +297,10 @@ public void testIncludeSourceIsFalseAndAtLeastOneSourceField() throws IOExceptio assertThat(dataExtractor.hasNext(), is(true)); - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(1)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(rows.get().length, equalTo(1)); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); assertThat(dataExtractor.hasNext(), is(true)); assertThat(dataExtractor.next(), 
isEmpty()); @@ -364,18 +364,18 @@ public void testMissingValues_GivenSupported() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(3)); + assertThat(rows.get().length, equalTo(3)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" })); - assertThat(rows.get().get(1).getValues()[0], equalTo(DataFrameDataExtractor.NULL_VALUE)); - assertThat(rows.get().get(1).getValues()[1], equalTo("22")); - assertThat(rows.get().get(2).getValues(), equalTo(new String[] { "13", "23" })); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues()[0], equalTo(DataFrameDataExtractor.NULL_VALUE)); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues()[1], equalTo("22")); + assertThat(dataExtractor.createRow(rows.get()[2]).getValues(), equalTo(new String[] { "13", "23" })); - assertThat(rows.get().get(0).shouldSkip(), is(false)); - assertThat(rows.get().get(1).shouldSkip(), is(false)); - assertThat(rows.get().get(2).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[0]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[1]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[2]).shouldSkip(), is(false)); assertThat(dataExtractor.hasNext(), is(true)); @@ -399,17 +399,17 @@ public void testMissingValues_GivenNotSupported() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(3)); + assertThat(rows.get().length, equalTo(3)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "11", "21" 
})); - assertThat(rows.get().get(1).getValues(), is(nullValue())); - assertThat(rows.get().get(2).getValues(), equalTo(new String[] { "13", "23" })); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "11", "21" })); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues(), is(nullValue())); + assertThat(dataExtractor.createRow(rows.get()[2]).getValues(), equalTo(new String[] { "13", "23" })); - assertThat(rows.get().get(0).shouldSkip(), is(false)); - assertThat(rows.get().get(1).shouldSkip(), is(true)); - assertThat(rows.get().get(2).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[0]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[1]).shouldSkip(), is(true)); + assertThat(dataExtractor.createRow(rows.get()[2]).shouldSkip(), is(false)); assertThat(dataExtractor.hasNext(), is(true)); @@ -538,20 +538,20 @@ public void testExtractionWithProcessedFeatures() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(3)); + assertThat(rows.get().length, equalTo(3)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "21", "dog", "1", "0" })); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "21", "dog", "1", "0" })); assertThat( - rows.get().get(1).getValues(), + dataExtractor.createRow(rows.get()[1]).getValues(), equalTo(new String[] { "22", "dog", DataFrameDataExtractor.NULL_VALUE, DataFrameDataExtractor.NULL_VALUE }) ); - assertThat(rows.get().get(2).getValues(), equalTo(new String[] { "23", "dog", "0", "0" })); + assertThat(dataExtractor.createRow(rows.get()[2]).getValues(), equalTo(new String[] { "23", "dog", "0", "0" })); - assertThat(rows.get().get(0).shouldSkip(), is(false)); - assertThat(rows.get().get(1).shouldSkip(), is(false)); 
- assertThat(rows.get().get(2).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[0]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[1]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[2]).shouldSkip(), is(false)); } public void testExtractionWithMultipleScalarTypesInSource() throws IOException { @@ -577,17 +577,17 @@ public void testExtractionWithMultipleScalarTypesInSource() throws IOException { assertThat(dataExtractor.hasNext(), is(true)); // First batch - Optional> rows = dataExtractor.next(); + Optional rows = dataExtractor.next(); assertThat(rows.isPresent(), is(true)); - assertThat(rows.get().size(), equalTo(3)); + assertThat(rows.get().length, equalTo(3)); - assertThat(rows.get().get(0).getValues(), equalTo(new String[] { "1", "21", })); - assertThat(rows.get().get(1).getValues(), equalTo(new String[] { "true", "22" })); - assertThat(rows.get().get(2).getValues(), equalTo(new String[] { "false", "23" })); + assertThat(dataExtractor.createRow(rows.get()[0]).getValues(), equalTo(new String[] { "1", "21", })); + assertThat(dataExtractor.createRow(rows.get()[1]).getValues(), equalTo(new String[] { "true", "22" })); + assertThat(dataExtractor.createRow(rows.get()[2]).getValues(), equalTo(new String[] { "false", "23" })); - assertThat(rows.get().get(0).shouldSkip(), is(false)); - assertThat(rows.get().get(1).shouldSkip(), is(false)); - assertThat(rows.get().get(2).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[0]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[1]).shouldSkip(), is(false)); + assertThat(dataExtractor.createRow(rows.get()[2]).shouldSkip(), is(false)); } public void testExtractionWithProcessedFieldThrows() { @@ -610,7 +610,7 @@ public void testExtractionWithProcessedFieldThrows() { assertThat(dataExtractor.hasNext(), is(true)); - expectThrows(RuntimeException.class, () -> dataExtractor.next()); + 
expectThrows(RuntimeException.class, () -> Arrays.stream(dataExtractor.next().get()).forEach(dataExtractor::createRow)); } private TestExtractor createExtractor(boolean includeSource, boolean supportsRowsWithMissingValues) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java index 3a95a3bb65f10..cb02b8294b115 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/dataframe/process/DataFrameRowsJoinerTests.java @@ -306,6 +306,7 @@ private void givenDataFrameBatches(List> batche DelegateStubDataExtractor delegateStubDataExtractor = new DelegateStubDataExtractor(batches); when(dataExtractor.hasNext()).thenAnswer(a -> delegateStubDataExtractor.hasNext()); when(dataExtractor.next()).thenAnswer(a -> delegateStubDataExtractor.next()); + when(dataExtractor.createRow(any(SearchHit.class))).thenAnswer(a -> delegateStubDataExtractor.makeRow(a.getArgument(0))); } private static SearchHit newHit(String json) { @@ -340,19 +341,32 @@ private void givenClientHasNoFailures() { private static class DelegateStubDataExtractor { - private final List> batches; + private final List batches; + private final Map rows = new HashMap<>(); private int batchIndex; - private DelegateStubDataExtractor(List> batches) { - this.batches = batches; + private DelegateStubDataExtractor(List> rows) { + batches = new ArrayList<>(rows.size()); + for (List batch : rows) { + List batchHits = new ArrayList<>(batch.size()); + for (DataFrameDataExtractor.Row row : batch) { + this.rows.put(row.getHit(), row); + batchHits.add(row.getHit()); + } + batches.add(batchHits.toArray(new SearchHit[0])); + } } public boolean hasNext() { return batchIndex < batches.size(); } - public Optional> next() { + public Optional 
next() { return Optional.of(batches.get(batchIndex++)); } + + public DataFrameDataExtractor.Row makeRow(SearchHit hit) { + return rows.get(hit); + } } } diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java index a7fb5571995b3..92b15cd9851db 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/full/PersistentCache.java @@ -437,6 +437,16 @@ static Map loadDocuments(Path directoryPath) throws IOExceptio } catch (IndexNotFoundException e) { logger.debug("persistent cache index does not exist yet", e); } + } catch (Exception e) { + if (e instanceof IllegalArgumentException iae) { + final var message = iae.getMessage(); + if (message != null && message.startsWith("indexCreatedVersionMajor is in the future:")) { + logger.warn("Deleting persistent cache index created in the future [message: {}]", message); + IOUtils.rm(directoryPath); + return Map.of(); + } + } + throw e; } return documents; } diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java index 19d742e32aa08..0ceffa984a979 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS1EnrichUnavailableRemotesIT.java @@ -187,7 +187,10 @@ 
private void esqlEnrichWithSkipUnavailableTrue() throws Exception { Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); - assertThat(reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception")); + assertThat( + reason.get("type").toString(), + oneOf("node_disconnected_exception", "connect_transport_exception", "node_not_connected_exception") + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -206,7 +209,11 @@ private void esqlEnrichWithSkipUnavailableFalse() throws Exception { assertThat( ex.getMessage(), - anyOf(containsString("connect_transport_exception"), containsString("node_disconnected_exception")) + anyOf( + containsString("connect_transport_exception"), + containsString("node_disconnected_exception"), + containsString("node_not_connected_exception") + ) ); } finally { fulfillingCluster.start(); diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java index 3f068cf3a04cc..075ea86c22e98 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/CrossClusterEsqlRCS2EnrichUnavailableRemotesIT.java @@ -207,7 +207,10 @@ private void esqlEnrichWithSkipUnavailableTrue() throws Exception { Map failuresMap = (Map) remoteClusterFailures.get(0); Map reason = (Map) failuresMap.get("reason"); - assertThat(reason.get("type").toString(), oneOf("node_disconnected_exception", "connect_transport_exception")); + assertThat( + reason.get("type").toString(), + oneOf("node_disconnected_exception", 
"connect_transport_exception", "node_not_connected_exception") + ); } finally { fulfillingCluster.start(); closeFulfillingClusterClient(); @@ -225,7 +228,11 @@ private void esqlEnrichWithSkipUnavailableFalse() throws Exception { ResponseException ex = expectThrows(ResponseException.class, () -> performRequestWithRemoteSearchUser(esqlRequest(query))); assertThat( ex.getMessage(), - anyOf(containsString("connect_transport_exception"), containsString("node_disconnected_exception")) + anyOf( + containsString("connect_transport_exception"), + containsString("node_disconnected_exception"), + containsString("node_not_connected_exception") + ) ); } finally { fulfillingCluster.start(); diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java index 2039b72c4f49f..550578161302b 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/integration/DataStreamSecurityIT.java @@ -91,7 +91,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { : original.getIndices().get(0).getName(); DataStream broken = original.copy() .setBackingIndices( - original.getBackingIndices() + original.getDataComponent() .copy() .setIndices(List.of(new Index(brokenIndexName, "broken"), original.getIndices().get(1))) .build() diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java index 87a5146113f72..d0aa95e37aeee 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java +++ 
b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/profile/ProfileCancellationIntegTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexModule; @@ -409,7 +410,7 @@ public void authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, + Metadata metadata, ActionListener listener ) { listener.onResponse(IndexAuthorizationResult.ALLOW_NO_INDICES); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 0dfdf0861e321..e36c96ab02562 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -511,7 +511,7 @@ private void authorizeAction( requestInfo, authzInfo, resolvedIndicesAsyncSupplier, - metadata.getIndicesLookup(), + metadata, wrapPreservingContext( new AuthorizationResultListener<>( result -> handleIndexActionAuthorizationResult( @@ -586,7 +586,7 @@ private void handleIndexActionAuthorizationResult( ResolvedIndices withAliases = new ResolvedIndices(aliasesAndIndices, Collections.emptyList()); l.onResponse(withAliases); })), - metadata.getIndicesLookup(), + metadata, wrapPreservingContext( new AuthorizationResultListener<>( authorizationResult -> runRequestInterceptors(requestInfo, authzInfo, authorizationEngine, listener), @@ -867,7 +867,7 @@ private void authorizeBulkItems( bulkItemInfo, authzInfo, 
ril -> ril.onResponse(new ResolvedIndices(new ArrayList<>(indices), Collections.emptyList())), - metadata.getIndicesLookup(), + metadata, groupedActionListener.delegateFailureAndWrap( (l, indexAuthorizationResult) -> l.onResponse(new Tuple<>(bulkItemAction, indexAuthorizationResult)) ) diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java index c5f4af93ab4b8..614401770cfb7 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/RBACEngine.java @@ -34,6 +34,7 @@ import org.elasticsearch.action.termvectors.MultiTermVectorsAction; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexAbstraction; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.regex.Regex; @@ -317,7 +318,7 @@ public void authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, + Metadata metadata, ActionListener listener ) { final String action = requestInfo.getAction(); @@ -422,7 +423,7 @@ public void authorizeIndexAction( .allMatch(IndicesAliasesRequest.AliasActions::expandAliasesWildcards)) : "expanded wildcards for local indices OR the request should not expand wildcards at all"; - IndexAuthorizationResult result = buildIndicesAccessControl(action, role, resolvedIndices, aliasOrIndexLookup); + IndexAuthorizationResult result = buildIndicesAccessControl(action, role, resolvedIndices, metadata); if (requestInfo.getAuthentication().isCrossClusterAccess() && request instanceof IndicesRequest.RemoteClusterShardRequest shardsRequest && shardsRequest.shards() != null) { 
@@ -882,7 +883,7 @@ static AuthorizedIndices resolveAuthorizedIndicesFromRole( indicesAndAliases.add(index.getName()); } // TODO: We need to limit if a data stream's failure indices should return here. - for (Index index : ((DataStream) indexAbstraction).getFailureIndices().getIndices()) { + for (Index index : ((DataStream) indexAbstraction).getFailureIndices()) { indicesAndAliases.add(index.getName()); } } @@ -917,12 +918,12 @@ private IndexAuthorizationResult buildIndicesAccessControl( String action, Role role, ResolvedIndices resolvedIndices, - Map aliasAndIndexLookup + Metadata metadata ) { final IndicesAccessControl accessControl = role.authorize( action, Sets.newHashSet(resolvedIndices.getLocal()), - aliasAndIndexLookup, + metadata, fieldPermissionsCache ); return new IndexAuthorizationResult(accessControl); diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml index 98c6b81553572..636627240bf4c 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml @@ -1,2 +1,12 @@ org.elasticsearch.security: - set_https_connection_properties # for CommandLineHttpClient +io.netty.transport: + - inbound_network + - outbound_network +io.netty.common: + - inbound_network + - outbound_network +org.opensaml.xmlsec.impl: + - write_system_properties: + properties: + - org.apache.xml.security.ignoreLineBreaks diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index d3b0d3b2e1faa..4f0e15eec7708 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -3416,7 +3416,7 @@ public void authorizeIndexAction( RequestInfo requestInfo, AuthorizationInfo authorizationInfo, AsyncSupplier indicesAsyncSupplier, - Map aliasOrIndexLookup, + Metadata metadata, ActionListener listener ) { throw new UnsupportedOperationException("not implemented"); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java index a0d63d26d475f..f7dc725c3f07d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolverTests.java @@ -2387,7 +2387,7 @@ public void testBackingIndicesAreVisibleWhenIncludedByRequestWithWildcard() { assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } @@ -2427,7 +2427,7 @@ public void testBackingIndicesAreNotVisibleWhenNotIncludedByRequestWithoutWildca assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } @@ -2444,7 +2444,7 @@ public void testBackingIndicesAreNotVisibleWhenNotIncludedByRequestWithoutWildca for (Index i : dataStream.getIndices()) 
{ assertThat(resolvedIndices.getLocal(), hasItem(i.getName())); } - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { assertThat(resolvedIndices.getLocal(), hasItem(i.getName())); } } @@ -2470,7 +2470,7 @@ public void testDataStreamNotAuthorizedWhenBackingIndicesAreAuthorizedViaWildcar assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } @@ -2542,7 +2542,7 @@ public void testDataStreamNotAuthorizedWhenBackingIndicesAreAuthorizedViaWildcar assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } - for (Index i : dataStream.getFailureIndices().getIndices()) { + for (Index i : dataStream.getFailureIndices()) { assertThat(authorizedIndices.all().get(), hasItem(i.getName())); assertThat(authorizedIndices.check(i.getName()), is(true)); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java index a41c54ada781a..482715bb74c83 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/RBACEngineTests.java @@ -26,6 +26,7 @@ import org.elasticsearch.cluster.metadata.DataStreamTestHelper; import org.elasticsearch.cluster.metadata.IndexAbstraction; import org.elasticsearch.cluster.metadata.IndexMetadata; +import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import 
org.elasticsearch.common.bytes.BytesReference; @@ -1971,17 +1972,16 @@ private void authorizeIndicesAction( final RequestInfo requestInfo = createRequestInfo(searchRequest, action, parentAuthorization); final AsyncSupplier indicesAsyncSupplier = s -> s.onResponse(resolvedIndices); - final Map aliasOrIndexLookup = Stream.of(indices) - .collect( - Collectors.toMap( - i -> i, - v -> new IndexAbstraction.ConcreteIndex( - IndexMetadata.builder(v).settings(indexSettings(IndexVersion.current(), 1, 0)).build() - ) + Metadata.Builder metadata = Metadata.builder(); + Stream.of(indices) + .forEach( + indexName -> metadata.put( + IndexMetadata.builder(indexName).settings(indexSettings(IndexVersion.current(), 1, 0)).build(), + false ) ); - engine.authorizeIndexAction(requestInfo, authzInfo, indicesAsyncSupplier, aliasOrIndexLookup, listener); + engine.authorizeIndexAction(requestInfo, authzInfo, indicesAsyncSupplier, metadata.build(), listener); } private static RequestInfo createRequestInfo(TransportRequest request, String action, ParentActionAuthorization parentAuthorization) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index ac834911fc4e6..4488c28750dc0 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -46,7 +46,6 @@ import java.util.Collections; import java.util.List; import java.util.Set; -import java.util.SortedMap; import java.util.stream.Collectors; import static org.elasticsearch.common.settings.Settings.builder; @@ -66,7 +65,6 @@ public void testAuthorize() { .putAlias(AliasMetadata.builder("_alias")); Metadata md = Metadata.builder().put(imbBuilder).build(); 
FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = md.getIndicesLookup(); // basics: Set query = Collections.singleton(new BytesArray("{}")); @@ -77,7 +75,7 @@ public void testAuthorize() { IndicesAccessControl permissions = role.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), - lookup, + md, fieldPermissionsCache ); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); @@ -91,7 +89,7 @@ public void testAuthorize() { role = Role.builder(RESTRICTED_INDICES, "_role") .add(new FieldPermissions(fieldPermissionDef(fields, null)), null, IndexPrivilege.ALL, randomBoolean(), "_index") .build(); - permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), lookup, fieldPermissionsCache); + permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -102,7 +100,7 @@ public void testAuthorize() { role = Role.builder(RESTRICTED_INDICES, "_role") .add(FieldPermissions.DEFAULT, query, IndexPrivilege.ALL, randomBoolean(), "_index") .build(); - permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), lookup, fieldPermissionsCache); + permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getDocumentPermissions().hasDocumentLevelPermissions(), is(true)); @@ -113,7 +111,7 @@ public void testAuthorize() { 
role = Role.builder(RESTRICTED_INDICES, "_role") .add(new FieldPermissions(fieldPermissionDef(fields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") .build(); - permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); + permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().grantsAccessTo("_field")); assertTrue(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -137,7 +135,7 @@ public void testAuthorize() { role = Role.builder(RESTRICTED_INDICES, "_role") .add(new FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") .build(); - permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); + permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), md, fieldPermissionsCache); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); assertThat(permissions.getIndexPermissions("_index").getDocumentPermissions().hasDocumentLevelPermissions(), is(true)); @@ -154,7 +152,6 @@ public void testAuthorize() { .settings(indexSettings(IndexVersion.current(), 1, 1)) .putAlias(AliasMetadata.builder("_alias")); md = Metadata.builder(md).put(imbBuilder1).build(); - lookup = md.getIndicesLookup(); // match all fields with more than one permission Set fooQuery = Collections.singleton(new BytesArray("{foo}")); @@ -163,7 +160,7 @@ public void testAuthorize() { .add(new FieldPermissions(fieldPermissionDef(allFields, null)), fooQuery, IndexPrivilege.ALL, randomBoolean(), "_alias") .add(new 
FieldPermissions(fieldPermissionDef(allFields, null)), query, IndexPrivilege.ALL, randomBoolean(), "_alias") .build(); - permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), lookup, fieldPermissionsCache); + permissions = role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("_alias"), md, fieldPermissionsCache); Set bothQueries = Sets.union(fooQuery, query); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); assertFalse(permissions.getIndexPermissions("_index").getFieldPermissions().hasFieldLevelSecurity()); @@ -191,7 +188,6 @@ public void testAuthorizeMultipleGroupsMixedDls() { .putAlias(AliasMetadata.builder("_alias")); Metadata md = Metadata.builder().put(imbBuilder).build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = md.getIndicesLookup(); Set query = Collections.singleton(new BytesArray("{}")); String[] fields = new String[] { "_field" }; @@ -202,7 +198,7 @@ public void testAuthorizeMultipleGroupsMixedDls() { IndicesAccessControl permissions = role.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet("_index"), - lookup, + md, fieldPermissionsCache ); assertThat(permissions.getIndexPermissions("_index"), notNullValue()); @@ -253,7 +249,6 @@ public void testCorePermissionAuthorize() { new IndexMetadata.Builder("a1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true ).put(new IndexMetadata.Builder("a2").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true).build(); - SortedMap lookup = metadata.getIndicesLookup(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); IndicesPermission core = new IndicesPermission.Builder(RESTRICTED_INDICES).addGroup( @@ -274,7 +269,7 @@ public void testCorePermissionAuthorize() { IndicesAccessControl iac = core.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "ba"), - lookup, + 
metadata, fieldPermissionsCache ); assertTrue(iac.getIndexPermissions("a1").getFieldPermissions().grantsAccessTo("denied_field")); @@ -317,7 +312,7 @@ public void testCorePermissionAuthorize() { "a2" ) .build(); - iac = core.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "a2"), lookup, fieldPermissionsCache); + iac = core.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("a1", "a2"), metadata, fieldPermissionsCache); assertFalse(iac.getIndexPermissions("a1").getFieldPermissions().hasFieldLevelSecurity()); assertFalse(iac.getIndexPermissions("a2").getFieldPermissions().grantsAccessTo("denied_field2")); assertFalse(iac.getIndexPermissions("a2").getFieldPermissions().grantsAccessTo("denied_field")); @@ -368,7 +363,6 @@ public void testSecurityIndicesPermissions() { true ).build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = metadata.getIndicesLookup(); // allow_restricted_indices: false IndicesPermission indicesPermission = new IndicesPermission.Builder(RESTRICTED_INDICES).addGroup( @@ -381,7 +375,7 @@ public void testSecurityIndicesPermissions() { IndicesAccessControl iac = indicesPermission.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet(internalSecurityIndex, SecuritySystemIndices.SECURITY_MAIN_ALIAS), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); @@ -401,7 +395,7 @@ public void testSecurityIndicesPermissions() { iac = indicesPermission.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet(internalSecurityIndex, SecuritySystemIndices.SECURITY_MAIN_ALIAS), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(true)); @@ -419,7 +413,6 @@ public void testAsyncSearchIndicesPermissions() { true ).build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = metadata.getIndicesLookup(); // allow_restricted_indices: false 
IndicesPermission indicesPermission = new IndicesPermission.Builder(RESTRICTED_INDICES).addGroup( @@ -432,7 +425,7 @@ public void testAsyncSearchIndicesPermissions() { IndicesAccessControl iac = indicesPermission.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet(asyncSearchIndex), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); @@ -450,7 +443,7 @@ public void testAsyncSearchIndicesPermissions() { iac = indicesPermission.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet(asyncSearchIndex), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(true)); @@ -477,7 +470,6 @@ public void testAuthorizationForBackingIndices() { Metadata metadata = builder.build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); - SortedMap lookup = metadata.getIndicesLookup(); IndicesPermission indicesPermission = new IndicesPermission.Builder(RESTRICTED_INDICES).addGroup( IndexPrivilege.READ, FieldPermissions.DEFAULT, @@ -488,7 +480,7 @@ public void testAuthorizationForBackingIndices() { IndicesAccessControl iac = indicesPermission.authorize( TransportSearchAction.TYPE.name(), Sets.newHashSet(backingIndices.stream().map(im -> im.getIndex().getName()).collect(Collectors.toList())), - lookup, + metadata, fieldPermissionsCache ); @@ -508,7 +500,7 @@ public void testAuthorizationForBackingIndices() { iac = indicesPermission.authorize( randomFrom(TransportPutMappingAction.TYPE.name(), TransportAutoPutMappingAction.TYPE.name()), Sets.newHashSet(backingIndices.stream().map(im -> im.getIndex().getName()).collect(Collectors.toList())), - lookup, + metadata, fieldPermissionsCache ); @@ -521,7 +513,7 @@ public void testAuthorizationForBackingIndices() { public void testAuthorizationForMappingUpdates() { final Settings indexSettings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersion.current()).build(); - final Metadata.Builder metadata = new 
Metadata.Builder().put( + final Metadata.Builder metadataBuilder = new Metadata.Builder().put( new IndexMetadata.Builder("test1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true ).put(new IndexMetadata.Builder("test_write1").settings(indexSettings).numberOfShards(1).numberOfReplicas(0).build(), true); @@ -535,12 +527,12 @@ public void testAuthorizationForMappingUpdates() { "test_write2", backingIndices.stream().map(IndexMetadata::getIndex).collect(Collectors.toList()) ); - metadata.put(ds); + metadataBuilder.put(ds); for (IndexMetadata index : backingIndices) { - metadata.put(index, false); + metadataBuilder.put(index, false); } - SortedMap lookup = metadata.build().getIndicesLookup(); + Metadata metadata = metadataBuilder.build(); FieldPermissionsCache fieldPermissionsCache = new FieldPermissionsCache(Settings.EMPTY); IndicesPermission core = new IndicesPermission.Builder(RESTRICTED_INDICES).addGroup( @@ -561,7 +553,7 @@ public void testAuthorizationForMappingUpdates() { IndicesAccessControl iac = core.authorize( TransportPutMappingAction.TYPE.name(), Sets.newHashSet("test1", "test_write1"), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(true)); @@ -589,7 +581,7 @@ public void testAuthorizationForMappingUpdates() { iac = core.authorize( TransportAutoPutMappingAction.TYPE.name(), Sets.newHashSet("test1", "test_write1"), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(true)); @@ -605,17 +597,17 @@ public void testAuthorizationForMappingUpdates() { + "users who require access to update mappings must be granted explicit privileges" ); - iac = core.authorize(TransportAutoPutMappingAction.TYPE.name(), Sets.newHashSet("test_write2"), lookup, fieldPermissionsCache); + iac = core.authorize(TransportAutoPutMappingAction.TYPE.name(), Sets.newHashSet("test_write2"), metadata, fieldPermissionsCache); assertThat(iac.isGranted(), is(true)); assertThat(iac.getIndexPermissions("test_write2"), 
is(notNullValue())); assertThat(iac.hasIndexPermissions("test_write2"), is(true)); - iac = core.authorize(TransportPutMappingAction.TYPE.name(), Sets.newHashSet("test_write2"), lookup, fieldPermissionsCache); + iac = core.authorize(TransportPutMappingAction.TYPE.name(), Sets.newHashSet("test_write2"), metadata, fieldPermissionsCache); assertThat(iac.getIndexPermissions("test_write2"), is(nullValue())); assertThat(iac.hasIndexPermissions("test_write2"), is(false)); iac = core.authorize( TransportAutoPutMappingAction.TYPE.name(), Sets.newHashSet(backingIndices.stream().map(im -> im.getIndex().getName()).collect(Collectors.toList())), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(true)); @@ -626,7 +618,7 @@ public void testAuthorizationForMappingUpdates() { iac = core.authorize( TransportPutMappingAction.TYPE.name(), Sets.newHashSet(backingIndices.stream().map(im -> im.getIndex().getName()).collect(Collectors.toList())), - lookup, + metadata, fieldPermissionsCache ); assertThat(iac.isGranted(), is(false)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java index cef3572ee3ac4..ed173d8e2b127 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStoreTests.java @@ -991,8 +991,7 @@ public void testMergingRolesWithFls() { true ) .build(); - IndicesAccessControl iac = role.indices() - .authorize("indices:data/read/search", Collections.singleton("test"), metadata.getIndicesLookup(), cache); + IndicesAccessControl iac = role.indices().authorize("indices:data/read/search", Collections.singleton("test"), metadata, cache); 
assertTrue(iac.getIndexPermissions("test").getFieldPermissions().grantsAccessTo("L1.foo")); assertFalse(iac.getIndexPermissions("test").getFieldPermissions().grantsAccessTo("L2.foo")); assertTrue(iac.getIndexPermissions("test").getFieldPermissions().grantsAccessTo("L3.foo")); @@ -2306,18 +2305,15 @@ public void testGetRoleForCrossClusterAccessAuthentication() throws Exception { .build(); final var emptyCache = new FieldPermissionsCache(Settings.EMPTY); assertThat( - role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache) - .isGranted(), + role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata, emptyCache).isGranted(), is(false == emptyRemoteRole) ); assertThat( - role.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata.getIndicesLookup(), emptyCache) - .isGranted(), + role.authorize(TransportCreateIndexAction.TYPE.name(), Sets.newHashSet("index1"), indexMetadata, emptyCache).isGranted(), is(false) ); assertThat( - role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("index2"), indexMetadata.getIndicesLookup(), emptyCache) - .isGranted(), + role.authorize(TransportSearchAction.TYPE.name(), Sets.newHashSet("index2"), indexMetadata, emptyCache).isGranted(), is(false) ); } diff --git a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java index 44e68c2ebea74..65e54513e8c9e 100644 --- a/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java +++ b/x-pack/plugin/spatial/src/main/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapper.java @@ -6,10 +6,16 @@ */ package org.elasticsearch.xpack.spatial.index.mapper; +import org.apache.lucene.document.Field; +import org.apache.lucene.document.FieldType; import 
org.apache.lucene.document.StoredField; import org.apache.lucene.document.XYDocValuesField; import org.apache.lucene.document.XYPointField; +import org.apache.lucene.geo.XYEncodingUtils; +import org.apache.lucene.index.DocValuesType; import org.apache.lucene.search.Query; +import org.apache.lucene.util.BytesRef; +import org.apache.lucene.util.NumericUtils; import org.elasticsearch.common.Explicit; import org.elasticsearch.common.geo.GeometryFormatterFactory; import org.elasticsearch.common.geo.ShapeRelation; @@ -146,15 +152,17 @@ public PointFieldMapper( @Override protected void index(DocumentParserContext context, CartesianPoint point) { - if (fieldType().isIndexed()) { - context.doc().add(new XYPointField(fieldType().name(), (float) point.getX(), (float) point.getY())); - } - if (fieldType().hasDocValues()) { + final boolean indexed = fieldType().isIndexed(); + final boolean hasDocValues = fieldType().hasDocValues(); + final boolean store = fieldType().isStored(); + if (indexed && hasDocValues) { + context.doc().add(new XYFieldWithDocValues(fieldType().name(), (float) point.getX(), (float) point.getY())); + } else if (hasDocValues) { context.doc().add(new XYDocValuesField(fieldType().name(), (float) point.getX(), (float) point.getY())); - } else if (fieldType().isStored() || fieldType().isIndexed()) { - context.addToFieldNames(fieldType().name()); + } else if (indexed) { + context.doc().add(new XYPointField(fieldType().name(), (float) point.getX(), (float) point.getY())); } - if (fieldType().isStored()) { + if (store) { context.doc().add(new StoredField(fieldType().name(), point.toString())); } } @@ -256,4 +264,61 @@ public CartesianPoint normalizeFromSource(CartesianPoint point) { return point; } } + + /** + * Utility class that allows adding index and doc values in one field + */ + static class XYFieldWithDocValues extends Field { + + private static final FieldType TYPE = new FieldType(); + + static { + TYPE.setDimensions(2, Integer.BYTES); + 
TYPE.setDocValuesType(DocValuesType.SORTED_NUMERIC); + TYPE.freeze(); + } + + // holds the doc value value. + private final long docValue; + + XYFieldWithDocValues(String name, float x, float y) { + super(name, TYPE); + final byte[] bytes; + if (fieldsData == null) { + bytes = new byte[8]; + fieldsData = new BytesRef(bytes); + } else { + bytes = ((BytesRef) fieldsData).bytes; + } + + int xEncoded = XYEncodingUtils.encode(x); + int yEncoded = XYEncodingUtils.encode(y); + NumericUtils.intToSortableBytes(xEncoded, bytes, 0); + NumericUtils.intToSortableBytes(yEncoded, bytes, 4); + + docValue = (((long) xEncoded) << 32) | (yEncoded & 0xFFFFFFFFL); + } + + @Override + public Number numericValue() { + return docValue; + } + + @Override + public String toString() { + StringBuilder result = new StringBuilder(); + result.append(getClass().getSimpleName()); + result.append(" <"); + result.append(name); + result.append(':'); + + byte[] bytes = ((BytesRef) fieldsData).bytes; + result.append(XYEncodingUtils.decode(bytes, 0)); + result.append(','); + result.append(XYEncodingUtils.decode(bytes, Integer.BYTES)); + + result.append('>'); + return result.toString(); + } + } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java index c06a3201b3eaf..30a30bde51528 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldMapperTests.java @@ -7,7 +7,6 @@ package org.elasticsearch.xpack.spatial.index.mapper; import org.apache.lucene.document.XYDocValuesField; -import org.apache.lucene.document.XYPointField; import org.apache.lucene.geo.GeoEncodingUtils; import org.apache.lucene.index.IndexableField; import org.apache.lucene.util.BytesRef; @@ -55,7 +54,7 @@ 
protected String getFieldName() { @Override protected void assertXYPointField(IndexableField field, float x, float y) { // Unfortunately XYPointField and parent classes like IndexableField do not define equals, so we use toString - assertThat(field.toString(), is(new XYPointField(FIELD_NAME, 2000.1f, 305.6f).toString())); + assertThat(field.toString(), is(new PointFieldMapper.XYFieldWithDocValues(FIELD_NAME, 2000.1f, 305.6f).toString())); } /** The GeoJSON parser used by 'point' and 'geo_point' mimic the required fields of the GeoJSON parser */ @@ -182,7 +181,7 @@ public void testArrayStored() throws Exception { SourceToParse sourceToParse = source(b -> b.startArray(FIELD_NAME).value(1.3).value(1.2).endArray()); ParsedDocument doc = mapper.parse(sourceToParse); assertThat(doc.rootDoc().getField(FIELD_NAME), notNullValue()); - assertThat(doc.rootDoc().getFields(FIELD_NAME), hasSize(3)); + assertThat(doc.rootDoc().getFields(FIELD_NAME), hasSize(2)); } public void testArrayArrayStored() throws Exception { diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml new file mode 100644 index 0000000000000..7ade369893f4b --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/30_synthetic_source.yml @@ -0,0 +1,123 @@ +setup: + - requires: + cluster_features: ["mapper.counted_keyword.synthetic_source_native_support"] + reason: "Feature implemented" + + - do: + indices.create: + index: test-events + body: + settings: + index: + mapping.source.mode: synthetic + mappings: + properties: + events: + type: counted_keyword + synthetic_source_keep: none + + + - do: + index: + index: test-events + id: "1" + body: { "events": [ "a", "b", "a", "c" ] } + + - do: + index: + index: test-events + id: "2" + body: { "events": ["b", "b", "c", "a", "b"] } + + - do: + index: + index: test-events + 
id: "3" + body: { "events": ["c", "a", null, "b", null, "c"]} + + - do: + index: + index: test-events + id: "4" + body: { "events": ["a"]} + + - do: + index: + index: test-events + id: "5" + body: { "events": []} + + - do: + index: + index: test-events + id: "6" + body: { "events": [null, null]} + + - do: + indices.refresh: { } + +--- +"Source values are mutated as expected": + - do: + search: + index: test-events + body: + query: + ids: + values: [1] + - match: + hits.hits.0._source: + events: ["a", "a", "b", "c"] + + - do: + search: + index: test-events + body: + query: + ids: + values: [2] + - match: + hits.hits.0._source: + events: ["a", "b", "b", "b", "c"] + + - do: + search: + index: test-events + body: + query: + ids: + values: [3] + - match: + hits.hits.0._source: + events: ["a", "b", "c", "c"] + + - do: + search: + index: test-events + body: + query: + ids: + values: [4] + - match: + hits.hits.0._source: + events: "a" + + - do: + search: + index: test-events + body: + query: + ids: + values: [5] + - match: + hits.hits.0._source: {} + + - do: + search: + index: test-events + body: + query: + ids: + values: [6] + - match: + hits.hits.0._source: {} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml index e7cda33896149..5f0f4e33a9ea0 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/190_lookup_join.yml @@ -20,7 +20,7 @@ setup: type: keyword - do: indices.create: - index: test-lookup + index: test-lookup-1 body: settings: index: @@ -32,13 +32,32 @@ setup: type: long color: type: keyword + - do: + indices.create: + index: test-lookup-2 + body: + settings: + index: + mode: lookup + mappings: + properties: + key: + type: long + color: + type: keyword - do: indices.update_aliases: body: actions: - add: - index: 
test-lookup + index: test-lookup-1 alias: test-lookup-alias + - add: + index: test-lookup-* + alias: test-lookup-alias-pattern-multiple + - add: + index: test-lookup-1* + alias: test-lookup-alias-pattern-single - do: bulk: index: "test" @@ -50,7 +69,7 @@ setup: - { "key": 2, "color": "blue" } - do: bulk: - index: "test-lookup" + index: "test-lookup-1" refresh: true body: - { "index": { } } @@ -63,7 +82,7 @@ basic: - do: esql.query: body: - query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup` ON key | LIMIT 3' + query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-1` ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} @@ -77,11 +96,11 @@ non-lookup index: - do: esql.query: body: - query: 'FROM test-lookup | SORT key | LOOKUP JOIN `test` ON key | LIMIT 3' + query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN `test` ON key | LIMIT 3' catch: "bad_request" - match: { error.type: "verification_exception" } - - contains: { error.reason: "Found 1 problem\nline 1:43: invalid [test] resolution in lookup mode to an index in [standard] mode" } + - contains: { error.reason: "Found 1 problem\nline 1:45: invalid [test] resolution in lookup mode to an index in [standard] mode" } --- alias: @@ -116,7 +135,32 @@ alias-repeated-index: - do: esql.query: body: - query: 'FROM test-lookup | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + query: 'FROM test-lookup-1 | SORT key | LOOKUP JOIN `test-lookup-alias` ON key | LIMIT 3' + + - match: {columns.0.name: "key"} + - match: {columns.0.type: "long"} + - match: {columns.1.name: "color"} + - match: {columns.1.type: "keyword"} + - match: {values.0: [1, "cyan"]} + - match: {values.1: [2, "yellow"]} + +--- +alias-pattern-multiple: + - do: + esql.query: + body: + query: 'FROM test-lookup-1 | LOOKUP JOIN `test-lookup-alias-pattern-multiple` ON key' + catch: "bad_request" + + - match: { error.type: "verification_exception" } + - contains: { error.reason: "Found 1 problem\nline 1:34: invalid 
[test-lookup-alias-pattern-multiple] resolution in lookup mode to [2] indices" } + +--- +alias-pattern-single: + - do: + esql.query: + body: + query: 'FROM test | SORT key | LOOKUP JOIN `test-lookup-alias-pattern-single` ON key | LIMIT 3' - match: {columns.0.name: "key"} - match: {columns.0.type: "long"} diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml index 0d9c66012dbfc..da8290a1e185d 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/60_usage.yml @@ -92,7 +92,7 @@ setup: - gt: {esql.functions.to_long: $functions_to_long} - match: {esql.functions.coalesce: $functions_coalesce} # Testing for the entire function set isn't feasbile, so we just check that we return the correct count as an approximation. - - length: {esql.functions: 133} # check the "sister" test below for a likely update to the same esql.functions length check + - length: {esql.functions: 134} # check the "sister" test below for a likely update to the same esql.functions length check --- "Basic ESQL usage output (telemetry) non-snapshot version": diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java index 84519166eddb6..dc8dfe377a844 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformIT.java @@ -39,6 +39,7 @@ import java.time.Instant; import java.util.HashMap; import java.util.Map; +import java.util.Set; import java.util.concurrent.TimeUnit; 
import java.util.concurrent.atomic.AtomicInteger; @@ -50,6 +51,7 @@ import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasKey; import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.in; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.not; @@ -245,10 +247,8 @@ public void testDestinationIndexBlocked() throws Exception { assertAcknowledged(adminClient().performRequest(request)); // index more docs so the checkpoint tries to run, wait until transform stops - assertBusy(() -> { - indexDoc(42, sourceIndexName); - assertEquals(TransformStats.State.WAITING.value(), getTransformState(transformId)); - }, 30, TimeUnit.SECONDS); + indexDoc(42, sourceIndexName); + assertBusy(() -> { assertEquals(TransformStats.State.WAITING.value(), getTransformState(transformId)); }, 30, TimeUnit.SECONDS); // unblock index request = new Request("PUT", destIndexName + "/_settings"); @@ -266,6 +266,46 @@ public void testDestinationIndexBlocked() throws Exception { deleteTransform(transformId); } + public void testUnblockWithNewDestinationIndex() throws Exception { + var transformId = "transform-continuous-unblock-destination"; + var sourceIndexName = "source-reviews"; + var destIndexName = "destination-reviews-old"; + var newDestIndexName = "destination-reviews-new"; + + // create transform & indices, wait until 1st checkpoint is finished + createReviewsIndex(newDestIndexName, 100, NUM_USERS, TransformIT::getUserIdForRow, TransformIT::getDateStringForRow); + createContinuousTransform(sourceIndexName, transformId, destIndexName); + + // block destination index + Request request = new Request("PUT", destIndexName + "/_block/write"); + assertAcknowledged(adminClient().performRequest(request)); + + // index more docs so the checkpoint tries to run, wait until transform stops + indexDoc(42, sourceIndexName); + assertBusy(() -> { 
assertEquals(TransformStats.State.WAITING.value(), getTransformState(transformId)); }, 30, TimeUnit.SECONDS); + + // change destination index + var update = format(""" + { + "description": "updated config", + "dest": { + "index": "%s" + } + } + """, newDestIndexName); + updateConfig(transformId, update, true, RequestOptions.DEFAULT); + + assertBusy(() -> { + assertThat( + getTransformState(transformId), + in(Set.of(TransformStats.State.STARTED.value(), TransformStats.State.INDEXING.value())) + ); + }, 30, TimeUnit.SECONDS); + + stopTransform(transformId); + deleteTransform(transformId); + } + public void testTransformLifecycleInALoop() throws Exception { String transformId = "lifecycle-in-a-loop"; String indexName = transformId + "-src"; diff --git a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java index 4b7e478dbb61d..3ee46e0ff087f 100644 --- a/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java +++ b/x-pack/plugin/transform/qa/multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/transform/integration/TransformRestTestCase.java @@ -437,7 +437,7 @@ protected void createReviewsIndex( Request req = new Request("PUT", indexName); req.setEntity(indexMappings); req.setOptions(RequestOptions.DEFAULT); - assertAcknowledged(adminClient().performRequest(req)); + assertOKAndConsume(adminClient().performRequest(req)); } // create index diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java index ecd93ae1ae721..4d5dacde6efcb 100644 --- 
a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/action/TransportUpdateTransformAction.java @@ -171,7 +171,8 @@ protected void doExecute(Task task, Request request, ActionListener li boolean updateChangesSettings = update.changesSettings(originalConfig); boolean updateChangesHeaders = update.changesHeaders(originalConfig); - if (updateChangesSettings || updateChangesHeaders) { + boolean updateChangesDestIndex = update.changesDestIndex(originalConfig); + if (updateChangesSettings || updateChangesHeaders || updateChangesDestIndex) { PersistentTasksCustomMetadata.PersistentTask transformTask = TransformTask.getTransformTask( request.getId(), clusterState @@ -256,6 +257,7 @@ protected void taskOperation( ) { transformTask.applyNewSettings(request.getConfig().getSettings()); transformTask.applyNewAuthState(request.getAuthState()); + transformTask.checkAndResetDestinationIndexBlock(request.getConfig()); listener.onResponse(new Response(request.getConfig())); } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java index 7c9a22aa9fbfe..e530a3db83045 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/TransformTask.java @@ -35,6 +35,7 @@ import org.elasticsearch.xpack.core.transform.transforms.SettingsConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo.TransformCheckpointingInfoBuilder; +import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; import 
org.elasticsearch.xpack.core.transform.transforms.TransformIndexerPosition; import org.elasticsearch.xpack.core.transform.transforms.TransformIndexerStats; import org.elasticsearch.xpack.core.transform.transforms.TransformState; @@ -415,6 +416,16 @@ public void applyNewAuthState(AuthorizationState authState) { } } + public void checkAndResetDestinationIndexBlock(TransformConfig config) { + if (context.isWaitingForIndexToUnblock()) { + var currentIndex = getIndexer() == null ? null : getIndexer().getConfig().getDestination().getIndex(); + var updatedIndex = config.getDestination().getIndex(); + if (updatedIndex.equals(currentIndex) == false) { + context.setIsWaitingForIndexToUnblock(false); + } + } + } + @Override protected void init( PersistentTasksService persistentTasksService, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index 67ce09c74e98c..e381659b1e01c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -36,6 +36,7 @@ import org.elasticsearch.xpack.core.indexing.IndexerState; import org.elasticsearch.xpack.core.transform.TransformConfigVersion; import org.elasticsearch.xpack.core.transform.transforms.AuthorizationState; +import org.elasticsearch.xpack.core.transform.transforms.DestConfig; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpoint; import org.elasticsearch.xpack.core.transform.transforms.TransformCheckpointingInfo; import org.elasticsearch.xpack.core.transform.transforms.TransformConfig; @@ -45,6 +46,8 @@ import org.elasticsearch.xpack.core.transform.transforms.TransformState; import org.elasticsearch.xpack.core.transform.transforms.TransformTaskParams; import 
org.elasticsearch.xpack.core.transform.transforms.TransformTaskState; +import org.elasticsearch.xpack.core.transform.transforms.latest.LatestConfigTests; +import org.elasticsearch.xpack.core.transform.transforms.pivot.PivotConfigTests; import org.elasticsearch.xpack.transform.DefaultTransformExtension; import org.elasticsearch.xpack.transform.TransformNode; import org.elasticsearch.xpack.transform.TransformServices; @@ -553,6 +556,75 @@ public void testDeriveBasicCheckpointingInfoWithNoIndexer() { assertThat(checkpointingInfo, sameInstance(TransformCheckpointingInfo.EMPTY)); } + public void testCheckAndResetDestinationIndexBlock() { + var currentConfig = randomConfigForDestIndex("oldDestination"); + var indexer = mock(ClientTransformIndexer.class); + when(indexer.getConfig()).thenReturn(currentConfig); + + var transformTask = createTransformTask(currentConfig, MockTransformAuditor.createMockAuditor()); + transformTask.initializeIndexer(indexer); + + transformTask.getContext().setIsWaitingForIndexToUnblock(true); + var updatedConfig = randomConfigForDestIndex("newDestination"); + + transformTask.checkAndResetDestinationIndexBlock(updatedConfig); + + assertFalse(transformTask.getContext().isWaitingForIndexToUnblock()); + } + + public void testCheckAndResetDestinationIndexBlock_NoChangeToDest() { + var currentConfig = randomConfigForDestIndex("oldDestination"); + var indexer = mock(ClientTransformIndexer.class); + when(indexer.getConfig()).thenReturn(currentConfig); + + var transformTask = createTransformTask(currentConfig, MockTransformAuditor.createMockAuditor()); + transformTask.initializeIndexer(indexer); + + transformTask.getContext().setIsWaitingForIndexToUnblock(true); + var updatedConfig = randomConfigForDestIndex("oldDestination"); + + transformTask.checkAndResetDestinationIndexBlock(updatedConfig); + + assertTrue(transformTask.getContext().isWaitingForIndexToUnblock()); + } + + public void testCheckAndResetDestinationIndexBlock_NotBlocked() { + var 
currentConfig = randomConfigForDestIndex("oldDestination"); + var indexer = mock(ClientTransformIndexer.class); + when(indexer.getConfig()).thenReturn(currentConfig); + + var transformTask = createTransformTask(currentConfig, MockTransformAuditor.createMockAuditor()); + transformTask.initializeIndexer(indexer); + + var updatedConfig = randomConfigForDestIndex("newDestination"); + + transformTask.checkAndResetDestinationIndexBlock(updatedConfig); + + assertFalse(transformTask.getContext().isWaitingForIndexToUnblock()); + } + + public void testCheckAndResetDestinationIndexBlock_NullIndexer() { + var currentConfig = randomConfigForDestIndex("oldDestination"); + var transformTask = createTransformTask(currentConfig, MockTransformAuditor.createMockAuditor()); + transformTask.getContext().setIsWaitingForIndexToUnblock(true); + + var updatedConfig = randomConfigForDestIndex("oldDestination"); + + transformTask.checkAndResetDestinationIndexBlock(updatedConfig); + + assertFalse(transformTask.getContext().isWaitingForIndexToUnblock()); + } + + private TransformConfig randomConfigForDestIndex(String indexName) { + var pivotOrLatest = randomBoolean(); + return TransformConfigTests.randomTransformConfigWithoutHeaders( + randomAlphaOfLengthBetween(1, 10), + pivotOrLatest ? null : PivotConfigTests.randomPivotConfig(), + pivotOrLatest ? 
LatestConfigTests.randomLatestConfig() : null, + new DestConfig(indexName, null, null) + ); + } + private TransformTask createTransformTask(TransformConfig transformConfig, MockTransformAuditor auditor) { var threadPool = mock(ThreadPool.class); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java index 846c3c47a2714..83fd0c8d3eada 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/DataStreamsUpgradeIT.java @@ -24,6 +24,7 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; import java.time.Instant; +import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; @@ -252,16 +253,27 @@ private static void createAndRolloverDataStream(String dataStreamName, int numRo assertOK(client().performRequest(putIndexTemplateRequest)); bulkLoadData(dataStreamName); for (int i = 0; i < numRollovers; i++) { - rollover(dataStreamName); + String oldIndexName = rollover(dataStreamName); + if (randomBoolean()) { + closeIndex(oldIndexName); + } bulkLoadData(dataStreamName); } } private void upgradeDataStream(String dataStreamName, int numRolloversOnOldCluster) throws Exception { Set indicesNeedingUpgrade = getDataStreamIndices(dataStreamName); + Set closedOldIndices = getClosedIndices(dataStreamName); final int explicitRolloverOnNewClusterCount = randomIntBetween(0, 2); for (int i = 0; i < explicitRolloverOnNewClusterCount; i++) { - rollover(dataStreamName); + String oldIndexName = rollover(dataStreamName); + if (randomBoolean()) { + if (i == 0) { + // Since this is the first rollover on the new cluster, the old index came from the old cluster + closedOldIndices.add(oldIndexName); + } + closeIndex(oldIndexName); + } } Request reindexRequest = new Request("POST", 
"/_migration/reindex"); reindexRequest.setJsonEntity(Strings.format(""" @@ -304,12 +316,14 @@ private void upgradeDataStream(String dataStreamName, int numRolloversOnOldClust */ assertThat( statusResponseMap.get("total_indices_requiring_upgrade"), - equalTo(originalWriteIndex + numRolloversOnOldCluster) + equalTo(originalWriteIndex + numRolloversOnOldCluster - closedOldIndices.size()) ); - assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1)); + assertThat(statusResponseMap.get("successes"), equalTo(numRolloversOnOldCluster + 1 - closedOldIndices.size())); // We expect all the original indices to have been deleted for (String oldIndex : indicesNeedingUpgrade) { - assertThat(indexExists(oldIndex), equalTo(false)); + if (closedOldIndices.contains(oldIndex) == false) { + assertThat(indexExists(oldIndex), equalTo(false)); + } } assertThat(getDataStreamIndices(dataStreamName).size(), equalTo(expectedTotalIndicesInDataStream)); } @@ -329,6 +343,29 @@ private Set getDataStreamIndices(String dataStreamName) throws IOExcepti return indices.stream().map(index -> index.get("index_name").toString()).collect(Collectors.toSet()); } + @SuppressWarnings("unchecked") + private Set getClosedIndices(String dataStreamName) throws IOException { + Set allIndices = getDataStreamIndices(dataStreamName); + Set closedIndices = new HashSet<>(); + Response response = client().performRequest(new Request("GET", "_cluster/state/blocks/indices")); + Map responseMap = XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false); + Map blocks = (Map) responseMap.get("blocks"); + Map indices = (Map) blocks.get("indices"); + for (Map.Entry indexEntry : indices.entrySet()) { + String indexName = indexEntry.getKey(); + if (allIndices.contains(indexName)) { + Map blocksForIndex = (Map) indexEntry.getValue(); + for (Map.Entry blockEntry : blocksForIndex.entrySet()) { + Map block = (Map) blockEntry.getValue(); + if ("index 
closed".equals(block.get("description"))) { + closedIndices.add(indexName); + } + } + } + } + return closedIndices; + } + /* * Similar to isOriginalClusterCurrent, but returns true if the major versions of the clusters are the same. So true * for 8.6 and 8.17, but false for 7.17 and 8.18. @@ -370,9 +407,11 @@ static String formatInstant(Instant instant) { return DateFormatter.forPattern(FormatNames.STRICT_DATE_OPTIONAL_TIME.getName()).format(instant); } - private static void rollover(String dataStreamName) throws IOException { + private static String rollover(String dataStreamName) throws IOException { Request rolloverRequest = new Request("POST", "/" + dataStreamName + "/_rollover"); Response rolloverResponse = client().performRequest(rolloverRequest); assertOK(rolloverResponse); + String oldIndexName = (String) entityAsMap(rolloverResponse).get("old_index"); + return oldIndexName; } }