diff --git a/.buildkite/packer_cache.sh b/.buildkite/packer_cache.sh new file mode 100755 index 0000000000000..752914ba55c23 --- /dev/null +++ b/.buildkite/packer_cache.sh @@ -0,0 +1,34 @@ +#!/bin/bash + +ROOT_DIR=$(cd "$(dirname "$0")/.." && pwd) + +branches=($(cat "$ROOT_DIR/branches.json" | jq -r '.branches[].branch')) +for branch in "${branches[@]}"; do + echo "Resolving dependencies for ${branch} branch" + rm -rf "checkout/$branch" + git clone /opt/git-mirrors/elastic-elasticsearch --branch "$branch" --single-branch "checkout/$branch" + + CHECKOUT_DIR=$(cd "./checkout/${branch}" && pwd) + CI_DIR="$CHECKOUT_DIR/.ci" + + if [ "$(uname -m)" = "arm64" ] || [ "$(uname -m)" = "aarch64" ]; then + ## On ARM we use a different properties file for setting java home + ## Also, we don't bother attempting to resolve dependencies for the 6.8 branch + source "$CI_DIR/java-versions-aarch64.properties" + export JAVA16_HOME="$HOME/.java/jdk16" + else + source "$CI_DIR/java-versions.properties" + ## We are caching BWC versions too, need these so we can build those + export JAVA8_HOME="$HOME/.java/java8" + export JAVA11_HOME="$HOME/.java/java11" + export JAVA12_HOME="$HOME/.java/openjdk12" + export JAVA13_HOME="$HOME/.java/openjdk13" + export JAVA14_HOME="$HOME/.java/openjdk14" + export JAVA15_HOME="$HOME/.java/openjdk15" + export JAVA16_HOME="$HOME/.java/openjdk16" + fi + + export JAVA_HOME="$HOME/.java/$ES_BUILD_JAVA" + "checkout/${branch}/gradlew" --project-dir "$CHECKOUT_DIR" --parallel -s resolveAllDependencies -Dorg.gradle.warning.mode=none -DisCI + rm -rf "checkout/${branch}" +done diff --git a/.buildkite/pipelines/intake.yml b/.buildkite/pipelines/intake.yml index a200e871ec8e6..3271007a00077 100644 --- a/.buildkite/pipelines/intake.yml +++ b/.buildkite/pipelines/intake.yml @@ -48,7 +48,7 @@ steps: timeout_in_minutes: 300 matrix: setup: - BWC_VERSION: ["7.17.16", "8.11.3", "8.12.0", "8.13.0"] + BWC_VERSION: ["7.17.17", "8.11.4", "8.12.0", "8.13.0"] agents: provider: gcp image: family/elasticsearch-ubuntu-2004 diff --git a/.buildkite/pipelines/periodic-packaging.yml b/.buildkite/pipelines/periodic-packaging.yml index d397039128457..66eb1fc79e3ca 100644 --- a/.buildkite/pipelines/periodic-packaging.yml +++ b/.buildkite/pipelines/periodic-packaging.yml @@ -1089,6 +1089,22 @@ steps: env: BWC_VERSION: 7.17.16 + - label: "{{matrix.image}} / 7.17.17 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.17 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.17 + - label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0 timeout_in_minutes: 300 @@ -1761,6 +1777,22 @@ steps: env: BWC_VERSION: 8.11.3 + - label: "{{matrix.image}} / 8.11.4 / packaging-tests-upgrade" + command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.4 + timeout_in_minutes: 300 + matrix: + setup: + image: + - rocky-8 + - ubuntu-2004 + agents: + provider: gcp + image: family/elasticsearch-{{matrix.image}} + machineType: custom-16-32768 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.11.4 + - label: "{{matrix.image}} / 8.12.0 / packaging-tests-upgrade" command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true 
destructiveDistroUpgradeTest.v8.12.0 timeout_in_minutes: 300 diff --git a/.buildkite/pipelines/periodic-platform-support.yml b/.buildkite/pipelines/periodic-platform-support.yml index b52f8506885c9..faf904f2f8b04 100644 --- a/.buildkite/pipelines/periodic-platform-support.yml +++ b/.buildkite/pipelines/periodic-platform-support.yml @@ -80,3 +80,19 @@ steps: diskName: /dev/sda1 env: GRADLE_TASK: "{{matrix.GRADLE_TASK}}" + - group: platform-support-unix-aws + steps: + - label: "{{matrix.image}} / platform-support-aws" + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true functionalTests + timeout_in_minutes: 420 + matrix: + setup: + image: + - amazonlinux-2023 + agents: + provider: aws + imagePrefix: elasticsearch-{{matrix.image}} + instanceType: m6a.8xlarge + diskSizeGb: 350 + diskType: gp3 + diskName: /dev/sda1 diff --git a/.buildkite/pipelines/periodic.yml b/.buildkite/pipelines/periodic.yml index 248bfd52742d7..3ce048533d131 100644 --- a/.buildkite/pipelines/periodic.yml +++ b/.buildkite/pipelines/periodic.yml @@ -662,6 +662,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 7.17.16 + - label: 7.17.17 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.17#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 7.17.17 - label: 8.0.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest timeout_in_minutes: 300 agents: provider: gcp image: family/elasticsearch-ubuntu-2004 machineType: n1-standard-32 buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.0.0 @@ -1082,6 +1092,16 @@ steps: buildDirectory: /dev/shm/bk env: BWC_VERSION: 8.11.3 + - label: 8.11.4 / bwc + command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.4#bwcTest + timeout_in_minutes: 300 + agents: + provider: gcp + image: family/elasticsearch-ubuntu-2004 + machineType: n1-standard-32 + buildDirectory: /dev/shm/bk + env: + BWC_VERSION: 8.11.4 - label: 8.12.0 / bwc command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.0#bwcTest timeout_in_minutes: 300 diff --git a/.buildkite/scripts/periodic.trigger.sh b/.buildkite/scripts/periodic.trigger.sh index 3571d112c5b6d..cc10a5ae41861 100755 --- a/.buildkite/scripts/periodic.trigger.sh +++ b/.buildkite/scripts/periodic.trigger.sh @@ -6,11 +6,26 @@ echo "steps:" source .buildkite/scripts/branches.sh +IS_FIRST=true +SKIP_DELAY="${SKIP_DELAY:-false}" + for BRANCH in "${BRANCHES[@]}"; do INTAKE_PIPELINE_SLUG="elasticsearch-intake" BUILD_JSON=$(curl -sH "Authorization: Bearer ${BUILDKITE_API_TOKEN}" "https://api.buildkite.com/v2/organizations/elastic/pipelines/${INTAKE_PIPELINE_SLUG}/builds?branch=${BRANCH}&state=passed&per_page=1" | jq '.[0] | {commit: .commit, url: .web_url}') LAST_GOOD_COMMIT=$(echo "${BUILD_JSON}" | jq -r '.commit') + # Put a delay between each branch's set of pipelines by prepending each non-first branch with a sleep + # This is to smooth out the spike in agent requests + if [[ "$IS_FIRST" != "true" && "$SKIP_DELAY" != "true" ]]; then + cat <<EOF + - command: sleep 540 + soft_fail: true + - wait: ~ + continue_on_failure: true +EOF + fi + IS_FIRST=false diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/AggregatorBenchmark.java @@ ... @@ private static Block dataBlock(String blockType) { - case VECTOR_LONGS -> new LongArrayVector(LongStream.range(0, BLOCK_LENGTH).toArray(), BLOCK_LENGTH).asBlock(); - case VECTOR_DOUBLES -> new DoubleArrayVector( + case VECTOR_LONGS -> blockFactory.newLongArrayVector(LongStream.range(0, BLOCK_LENGTH).toArray(), BLOCK_LENGTH).asBlock(); + case VECTOR_DOUBLES -> blockFactory.newDoubleArrayVector( LongStream.range(0, BLOCK_LENGTH).mapToDouble(l -> Long.valueOf(l).doubleValue()).toArray(), BLOCK_LENGTH ).asBlock(); case MULTIVALUED_LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = 
blockFactory.newLongBlockBuilder(BLOCK_LENGTH); builder.beginPositionEntry(); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); @@ -462,7 +463,7 @@ private static Block dataBlock(String blockType) { yield builder.build(); } case HALF_NULL_LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); builder.appendNull(); @@ -470,7 +471,7 @@ private static Block dataBlock(String blockType) { yield builder.build(); } case HALF_NULL_DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendDouble(i); builder.appendNull(); @@ -502,7 +503,7 @@ private static Block groupingBlock(String grouping, String blockType) { }; return switch (grouping) { case LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendLong(i % GROUPS); @@ -511,7 +512,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case INTS -> { - var builder = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendInt(i % GROUPS); @@ -520,7 +521,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendDouble(i % GROUPS); @@ -529,7 +530,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case BOOLEANS -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(BLOCK_LENGTH); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendBoolean(i % 2 == 1); @@ -538,7 +539,7 @@ private static Block groupingBlock(String grouping, String blockType) { yield builder.build(); } case BYTES_REFS -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(BLOCK_LENGTH); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { for (int v = 0; v < valuesPerGroup; v++) { builder.appendBytesRef(bytesGroup(i % GROUPS)); @@ -574,8 +575,9 @@ private static void run(String grouping, String op, String blockType, int opCoun default -> throw new IllegalArgumentException(); }; - Operator operator = operator(grouping, op, dataType); - Page page = page(grouping, blockType); + DriverContext driverContext = driverContext(); + Operator operator = operator(driverContext, grouping, op, dataType); + Page page = page(driverContext.blockFactory(), grouping, blockType); for (int i = 0; i < opCount; i++) { operator.addInput(page); } @@ -584,9 +586,6 @@ private static void run(String grouping, String op, String blockType, int opCoun } static DriverContext driverContext() { - return new DriverContext( - BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getInstance(new NoopCircuitBreaker("noop"), 
BigArrays.NON_RECYCLING_INSTANCE) - ); + return new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, blockFactory); } } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java new file mode 100644 index 0000000000000..e0281dbb856d4 --- /dev/null +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/BlockBenchmark.java @@ -0,0 +1,849 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.benchmark.compute.operator; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.common.util.BytesRefArray; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BooleanBigArrayBlock; +import org.elasticsearch.compute.data.BooleanBigArrayVector; +import org.elasticsearch.compute.data.BooleanBlock; +import org.elasticsearch.compute.data.BooleanVector; +import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BytesRefVector; +import org.elasticsearch.compute.data.DoubleBigArrayBlock; +import org.elasticsearch.compute.data.DoubleBigArrayVector; +import org.elasticsearch.compute.data.DoubleBlock; +import org.elasticsearch.compute.data.DoubleVector; +import org.elasticsearch.compute.data.IntBigArrayBlock; +import org.elasticsearch.compute.data.IntBigArrayVector; +import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.IntVector; +import org.elasticsearch.compute.data.LongBigArrayBlock; +import org.elasticsearch.compute.data.LongBigArrayVector; +import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.LongVector; +import org.openjdk.jmh.annotations.Benchmark; +import org.openjdk.jmh.annotations.BenchmarkMode; +import org.openjdk.jmh.annotations.Fork; +import org.openjdk.jmh.annotations.Level; +import org.openjdk.jmh.annotations.Measurement; +import org.openjdk.jmh.annotations.Mode; +import org.openjdk.jmh.annotations.OperationsPerInvocation; +import org.openjdk.jmh.annotations.OutputTimeUnit; +import org.openjdk.jmh.annotations.Param; +import org.openjdk.jmh.annotations.Scope; +import org.openjdk.jmh.annotations.Setup; +import org.openjdk.jmh.annotations.State; +import org.openjdk.jmh.annotations.TearDown; +import org.openjdk.jmh.annotations.Warmup; + +import java.util.ArrayList; +import java.util.BitSet; +import java.util.Collections; +import java.util.List; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import java.util.stream.IntStream; + +@Warmup(iterations = 5) +@Measurement(iterations = 7) +@BenchmarkMode(Mode.AverageTime) +@OutputTimeUnit(TimeUnit.NANOSECONDS) +@State(Scope.Thread) +@Fork(1) +public class BlockBenchmark { + + /** + * All data type/block kind combinations to be loaded before the benchmark. 
+ * It is important to be exhaustive here so that all implementers of {@link IntBlock#getInt(int)} are actually loaded when we benchmark + * {@link IntBlock}s etc. + */ + // We could also consider DocBlocks/DocVectors but they do not implement any of the typed block interfaces like IntBlock etc. + public static final String[] RELEVANT_TYPE_BLOCK_COMBINATIONS = { + "boolean/array", + "boolean/array-multivalue-null", + "boolean/big-array", + "boolean/big-array-multivalue-null", + "boolean/vector", + "boolean/vector-big-array", + "boolean/vector-const", + "BytesRef/array", + "BytesRef/array-multivalue-null", + "BytesRef/vector", + "BytesRef/vector-const", + "double/array", + "double/array-multivalue-null", + "double/big-array", + "double/big-array-multivalue-null", + "double/vector", + "double/vector-big-array", + "double/vector-const", + "int/array", + "int/array-multivalue-null", + "int/big-array", + "int/big-array-multivalue-null", + "int/vector", + "int/vector-big-array", + "int/vector-const", + "long/array", + "long/array-multivalue-null", + "long/big-array", + "long/big-array-multivalue-null", + "long/vector", + "long/vector-big-array", + "long/vector-const" }; + public static final int NUM_BLOCKS_PER_ITERATION = 1024; + public static final int BLOCK_TOTAL_POSITIONS = 8096; + + private static final double MV_PERCENTAGE = 0.3; + private static final double NULL_PERCENTAGE = 0.1; + private static final int MAX_MV_ELEMENTS = 100; + private static final int MAX_BYTES_REF_LENGTH = 255; + + private static final Random random = new Random(); + + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + static { + // Smoke test all the expected values and force loading subclasses more like prod + int totalPositions = 10; + long[] actualCheckSums = new long[NUM_BLOCKS_PER_ITERATION]; + + for (String paramString : RELEVANT_TYPE_BLOCK_COMBINATIONS) { + String[] params = paramString.split("/"); + String dataType = params[0]; + String blockKind = params[1]; + + BenchmarkBlocks data = buildBlocks(dataType, blockKind, totalPositions); + int[][] traversalOrders = createTraversalOrders(data.blocks, false); + run(dataType, data, traversalOrders, actualCheckSums); + assertCheckSums(data, actualCheckSums); + } + } + + private record BenchmarkBlocks(Block[] blocks, long[] checkSums) {}; + + private static BenchmarkBlocks buildBlocks(String dataType, String blockKind, int totalPositions) { + Block[] blocks = new Block[NUM_BLOCKS_PER_ITERATION]; + long[] checkSums = new long[NUM_BLOCKS_PER_ITERATION]; + + switch (dataType) { + case "boolean" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (blockKind.equalsIgnoreCase("vector-const")) { + BooleanVector vector = blockFactory.newConstantBooleanVector(random.nextBoolean(), totalPositions); + blocks[blockIndex] = vector.asBlock(); + continue; + } + + boolean[] values = new boolean[totalPositions]; + for (int i = 0; i < totalPositions; i++) { + values[i] = random.nextBoolean(); + } + + switch (blockKind) { + case "array" -> { + blocks[blockIndex] = blockFactory.newBooleanArrayBlock( + values, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING + ); + } + case "array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + + blocks[blockIndex] = 
blockFactory.newBooleanArrayBlock( + values, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED + ); + } + case "big-array" -> { + BitArray valuesBigArray = new BitArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < values.length; i++) { + if (values[i]) { + valuesBigArray.set(i); + } + } + + blocks[blockIndex] = new BooleanBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } + case "big-array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + BitArray valuesBigArray = new BitArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < values.length; i++) { + if (values[i]) { + valuesBigArray.set(i); + } + } + + blocks[blockIndex] = new BooleanBigArrayBlock( + valuesBigArray, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED, + blockFactory + ); + } + case "vector" -> { + BooleanVector vector = blockFactory.newBooleanArrayVector(values, totalPositions); + blocks[blockIndex] = vector.asBlock(); + } + case "vector-big-array" -> { + BitArray valuesBigArray = new BitArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < values.length; i++) { + if (values[i]) { + valuesBigArray.set(i); + } + } + BooleanVector vector = new BooleanBigArrayVector(valuesBigArray, totalPositions, blockFactory); + blocks[blockIndex] = vector.asBlock(); + } + default -> { + throw new IllegalStateException("illegal block kind [" + blockKind + "]"); + } + } + } + + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + BooleanBlock block = (BooleanBlock) blocks[blockIndex]; + checkSums[blockIndex] = computeBooleanCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray()); + } + } + case "BytesRef" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (blockKind.equalsIgnoreCase("vector-const")) { + byte[] bytes = new byte[random.nextInt(MAX_BYTES_REF_LENGTH)]; + random.nextBytes(bytes); + + BytesRefVector vector = blockFactory.newConstantBytesRefVector(new BytesRef(bytes), totalPositions); + blocks[blockIndex] = vector.asBlock(); + continue; + } + + BytesRefArray values = new BytesRefArray(totalPositions, BigArrays.NON_RECYCLING_INSTANCE); + byte[] bytes; + for (int i = 0; i < totalPositions; i++) { + bytes = new byte[random.nextInt(MAX_BYTES_REF_LENGTH)]; + random.nextBytes(bytes); + values.append(new BytesRef(bytes)); + } + + switch (blockKind) { + case "array" -> { + blocks[blockIndex] = blockFactory.newBytesRefArrayBlock( + values, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING + ); + } + case "array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + + blocks[blockIndex] = blockFactory.newBytesRefArrayBlock( + values, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED + ); + } + case "vector" -> { + BytesRefVector vector = blockFactory.newBytesRefArrayVector(values, totalPositions); + blocks[blockIndex] = vector.asBlock(); + } + default -> { + throw new IllegalStateException("illegal block kind [" + blockKind + "]"); + } + } + } + + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; 
blockIndex++) { + BytesRefBlock block = (BytesRefBlock) blocks[blockIndex]; + checkSums[blockIndex] = computeBytesRefCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray()); + } + } + case "double" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (blockKind.equalsIgnoreCase("vector-const")) { + DoubleVector vector = blockFactory.newConstantDoubleVector(random.nextDouble() * 1000000.0, totalPositions); + blocks[blockIndex] = vector.asBlock(); + continue; + } + + double[] values = new double[totalPositions]; + for (int i = 0; i < totalPositions; i++) { + values[i] = random.nextDouble() * 1000000.0; + } + + switch (blockKind) { + case "array" -> { + blocks[blockIndex] = blockFactory.newDoubleArrayBlock( + values, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING + ); + } + case "array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + + blocks[blockIndex] = blockFactory.newDoubleArrayBlock( + values, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED + ); + } + case "big-array" -> { + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new DoubleBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } + case "big-array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new DoubleBigArrayBlock( + valuesBigArray, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED, + blockFactory + ); + } + case "vector" -> { + DoubleVector vector = blockFactory.newDoubleArrayVector(values, totalPositions); + blocks[blockIndex] = vector.asBlock(); + } + case "vector-big-array" -> { + DoubleArray valuesBigArray = blockFactory.bigArrays().newDoubleArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + DoubleVector vector = new DoubleBigArrayVector(valuesBigArray, totalPositions, blockFactory); + blocks[blockIndex] = vector.asBlock(); + } + default -> { + throw new IllegalStateException("illegal block kind [" + blockKind + "]"); + } + } + } + + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + DoubleBlock block = (DoubleBlock) blocks[blockIndex]; + checkSums[blockIndex] = computeDoubleCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray()); + } + } + case "int" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (blockKind.equalsIgnoreCase("vector-const")) { + IntVector vector = blockFactory.newConstantIntVector(random.nextInt(), totalPositions); + blocks[blockIndex] = vector.asBlock(); + continue; + } + + int[] values = new int[totalPositions]; + for (int i = 0; i < totalPositions; i++) { + values[i] = random.nextInt(); + } + + switch (blockKind) { + case "array" -> { + blocks[blockIndex] = blockFactory.newIntArrayBlock( 
+ values, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING + ); + } + case "array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + + blocks[blockIndex] = blockFactory.newIntArrayBlock( + values, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED + ); + } + case "big-array" -> { + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new IntBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } + case "big-array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new IntBigArrayBlock( + valuesBigArray, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED, + blockFactory + ); + } + case "vector" -> { + IntVector vector = blockFactory.newIntArrayVector(values, totalPositions); + blocks[blockIndex] = vector.asBlock(); + } + case "vector-big-array" -> { + IntArray valuesBigArray = blockFactory.bigArrays().newIntArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + IntVector vector = new IntBigArrayVector(valuesBigArray, totalPositions, blockFactory); + blocks[blockIndex] = vector.asBlock(); + } + default -> { + throw new IllegalStateException("illegal block kind [" + blockKind + "]"); + } + } + } + + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + IntBlock block = (IntBlock) blocks[blockIndex]; + checkSums[blockIndex] = computeIntCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray()); + } + } + case "long" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (blockKind.equalsIgnoreCase("vector-const")) { + LongVector vector = blockFactory.newConstantLongVector(random.nextLong(), totalPositions); + blocks[blockIndex] = vector.asBlock(); + continue; + } + + long[] values = new long[totalPositions]; + for (int i = 0; i < totalPositions; i++) { + values[i] = random.nextLong(); + } + + switch (blockKind) { + case "array" -> { + blocks[blockIndex] = blockFactory.newLongArrayBlock( + values, + totalPositions, + null, + null, + Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING + ); + } + case "array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + + blocks[blockIndex] = blockFactory.newLongArrayBlock( + values, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED + ); + } + case "big-array" -> { + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new LongBigArrayBlock( + valuesBigArray, + totalPositions, + null, + null, + 
Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING, + blockFactory + ); + } + case "big-array-multivalue-null" -> { + int[] firstValueIndexes = randomFirstValueIndexes(totalPositions); + int positionCount = firstValueIndexes.length - 1; + BitSet nulls = randomNulls(positionCount); + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + + blocks[blockIndex] = new LongBigArrayBlock( + valuesBigArray, + positionCount, + firstValueIndexes, + nulls, + Block.MvOrdering.UNORDERED, + blockFactory + ); + } + case "vector" -> { + LongVector vector = blockFactory.newLongArrayVector(values, totalPositions); + blocks[blockIndex] = vector.asBlock(); + } + case "vector-big-array" -> { + LongArray valuesBigArray = blockFactory.bigArrays().newLongArray(totalPositions, false); + for (int i = 0; i < values.length; i++) { + valuesBigArray.set(i, values[i]); + } + LongVector vector = new LongBigArrayVector(valuesBigArray, totalPositions, blockFactory); + blocks[blockIndex] = vector.asBlock(); + } + default -> { + throw new IllegalStateException("illegal block kind [" + blockKind + "]"); + } + } + } + + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + LongBlock block = (LongBlock) blocks[blockIndex]; + checkSums[blockIndex] = computeLongCheckSum(block, IntStream.range(0, block.getPositionCount()).toArray()); + } + } + default -> { + throw new IllegalStateException("illegal data type [" + dataType + "]"); + } + } + + return new BenchmarkBlocks(blocks, checkSums); + } + + private static int[][] createTraversalOrders(Block[] blocks, boolean randomized) { + int[][] orders = new int[blocks.length][]; + + for (int i = 0; i < blocks.length; i++) { + IntStream positionsStream = IntStream.range(0, blocks[i].getPositionCount()); + + if (randomized) { + List<Integer> positions = new java.util.ArrayList<>(positionsStream.boxed().toList()); + Collections.shuffle(positions, random); + orders[i] = positions.stream().mapToInt(x -> x).toArray(); + } else { + orders[i] = positionsStream.toArray(); + } + } + + return orders; + } + + private static int[] randomFirstValueIndexes(int totalPositions) { + ArrayList<Integer> firstValueIndexes = new ArrayList<>(); + firstValueIndexes.add(0); + + int currentPosition = 0; + int nextPosition; + while (currentPosition < totalPositions) { + if (random.nextDouble() < MV_PERCENTAGE) { + nextPosition = Math.min(currentPosition + 1 + random.nextInt(MAX_MV_ELEMENTS), totalPositions); + } else { + nextPosition = currentPosition + 1; + } + firstValueIndexes.add(nextPosition); + currentPosition = nextPosition; + } + + return firstValueIndexes.stream().mapToInt(x -> x).toArray(); + } + + private static BitSet randomNulls(int positionCount) { + BitSet nulls = new BitSet(positionCount); + for (int i = 0; i < positionCount; i++) { + if (random.nextDouble() < NULL_PERCENTAGE) { + nulls.set(i); + } + } + + return nulls; + } + + private static void run(String dataType, BenchmarkBlocks data, int[][] traversalOrders, long[] resultCheckSums) { + switch (dataType) { + case "boolean" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + BooleanBlock block = (BooleanBlock) data.blocks[blockIndex]; + + resultCheckSums[blockIndex] = computeBooleanCheckSum(block, traversalOrders[blockIndex]); + } + } + case "BytesRef" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + BytesRefBlock block = (BytesRefBlock) 
data.blocks[blockIndex]; + + resultCheckSums[blockIndex] = computeBytesRefCheckSum(block, traversalOrders[blockIndex]); + } + } + case "double" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + DoubleBlock block = (DoubleBlock) data.blocks[blockIndex]; + + resultCheckSums[blockIndex] = computeDoubleCheckSum(block, traversalOrders[blockIndex]); + } + } + case "int" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + IntBlock block = (IntBlock) data.blocks[blockIndex]; + + resultCheckSums[blockIndex] = computeIntCheckSum(block, traversalOrders[blockIndex]); + } + } + case "long" -> { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + LongBlock block = (LongBlock) data.blocks[blockIndex]; + + resultCheckSums[blockIndex] = computeLongCheckSum(block, traversalOrders[blockIndex]); + } + } + default -> { + throw new IllegalStateException(); + } + } + } + + private static void assertCheckSums(BenchmarkBlocks data, long[] actualCheckSums) { + for (int blockIndex = 0; blockIndex < NUM_BLOCKS_PER_ITERATION; blockIndex++) { + if (actualCheckSums[blockIndex] != data.checkSums[blockIndex]) { + throw new AssertionError("checksums do not match for block [" + blockIndex + "]"); + } + } + } + + private static long computeBooleanCheckSum(BooleanBlock block, int[] traversalOrder) { + long sum = 0; + + for (int position : traversalOrder) { + if (block.isNull(position)) { + continue; + } + int start = block.getFirstValueIndex(position); + int end = start + block.getValueCount(position); + for (int i = start; i < end; i++) { + sum += block.getBoolean(i) ? 1 : 0; + } + } + + return sum; + } + + private static long computeBytesRefCheckSum(BytesRefBlock block, int[] traversalOrder) { + long sum = 0; + BytesRef currentValue = new BytesRef(); + + for (int position : traversalOrder) { + if (block.isNull(position)) { + continue; + } + int start = block.getFirstValueIndex(position); + int end = start + block.getValueCount(position); + for (int i = start; i < end; i++) { + block.getBytesRef(i, currentValue); + sum += currentValue.length > 0 ? 
currentValue.bytes[0] : 0; + } + } + + return sum; + } + + private static long computeDoubleCheckSum(DoubleBlock block, int[] traversalOrder) { + double sum = 0; + + for (int position : traversalOrder) { + if (block.isNull(position)) { + continue; + } + int start = block.getFirstValueIndex(position); + int end = start + block.getValueCount(position); + for (int i = start; i < end; i++) { + sum += block.getDouble(i); + } + } + + return (long) sum; + } + + private static long computeIntCheckSum(IntBlock block, int[] traversalOrder) { + int sum = 0; + + for (int position : traversalOrder) { + if (block.isNull(position)) { + continue; + } + int start = block.getFirstValueIndex(position); + int end = start + block.getValueCount(position); + for (int i = start; i < end; i++) { + sum += block.getInt(i); + } + } + + return sum; + } + + private static long computeLongCheckSum(LongBlock block, int[] traversalOrder) { + long sum = 0; + + for (int position : traversalOrder) { + if (block.isNull(position)) { + continue; + } + int start = block.getFirstValueIndex(position); + int end = start + block.getValueCount(position); + for (int i = start; i < end; i++) { + sum += block.getLong(i); + } + } + + return sum; + } + + private static boolean isRandom(String accessType) { + return accessType.equalsIgnoreCase("random"); + } + + /** + * Must be a subset of {@link BlockBenchmark#RELEVANT_TYPE_BLOCK_COMBINATIONS} + */ + @Param( + { + "boolean/array", + "boolean/array-multivalue-null", + "boolean/big-array", + "boolean/big-array-multivalue-null", + "boolean/vector", + "boolean/vector-big-array", + "boolean/vector-const", + "BytesRef/array", + "BytesRef/array-multivalue-null", + "BytesRef/vector", + "BytesRef/vector-const", + "double/array", + "double/array-multivalue-null", + "double/big-array", + "double/big-array-multivalue-null", + "double/vector", + "double/vector-big-array", + "double/vector-const", + "int/array", + "int/array-multivalue-null", + "int/big-array", + "int/big-array-multivalue-null", + "int/vector", + "int/vector-big-array", + "int/vector-const", + "long/array", + "long/array-multivalue-null", + "long/big-array", + "long/big-array-multivalue-null", + "long/vector", + "long/vector-big-array", + "long/vector-const" } + ) + public String dataTypeAndBlockKind; + + @Param({ "sequential", "random" }) + public String accessType; + + private BenchmarkBlocks data; + + private int[][] traversalOrders; + + private final long[] actualCheckSums = new long[NUM_BLOCKS_PER_ITERATION]; + + @Setup + public void setup() { + String[] params = dataTypeAndBlockKind.split("/"); + String dataType = params[0]; + String blockKind = params[1]; + + data = buildBlocks(dataType, blockKind, BLOCK_TOTAL_POSITIONS); + traversalOrders = createTraversalOrders(data.blocks, isRandom(accessType)); + } + + @Benchmark + @OperationsPerInvocation(NUM_BLOCKS_PER_ITERATION * BLOCK_TOTAL_POSITIONS) + public void run() { + String[] params = dataTypeAndBlockKind.split("/"); + String dataType = params[0]; + + run(dataType, data, traversalOrders, actualCheckSums); + } + + @TearDown(Level.Iteration) + public void assertCheckSums() { + assertCheckSums(data, actualCheckSums); + } +} diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java index 3a1142ad87d2f..1765897ba35e7 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java +++ 
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/EvalBenchmark.java @@ -14,7 +14,6 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BooleanVector; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; import org.elasticsearch.compute.data.Page; @@ -59,6 +58,12 @@ @State(Scope.Thread) @Fork(1) public class EvalBenchmark { + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + private static final int BLOCK_LENGTH = 8 * 1024; static final DriverContext driverContext = new DriverContext( @@ -207,15 +212,15 @@ private static void checkExpected(String operation, Page actual) { private static Page page(String operation) { return switch (operation) { case "abs", "add", "date_trunc", "equal_to_const" -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i * 100_000); } yield new Page(builder.build()); } case "long_equal_to_long" -> { - var lhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); - var rhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + var rhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { lhs.appendLong(i * 100_000); rhs.appendLong(i * 100_000); @@ -223,8 +228,8 @@ private static Page page(String operation) { yield new Page(lhs.build(), rhs.build()); } case "long_equal_to_int" -> { - var lhs = LongBlock.newBlockBuilder(BLOCK_LENGTH); - var rhs = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var lhs = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); + var rhs = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { lhs.appendLong(i * 100_000); rhs.appendInt(i * 100_000); @@ -232,7 +237,7 @@ private static Page page(String operation) { yield new Page(lhs.build(), rhs.build()); } case "mv_min", "mv_min_ascending" -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); if (operation.endsWith("ascending")) { builder.mvOrdering(Block.MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING); } diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java index 09cdc8b269ad3..c32aa1184ddaa 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/MultivalueDedupeBenchmark.java @@ -10,6 +10,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; @@ -43,6 +45,12 @@ @State(Scope.Thread) @Fork(1) public class MultivalueDedupeBenchmark { + private static final BigArrays BIG_ARRAYS = 
BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + @Param({ "BOOLEAN", "BYTES_REF", "DOUBLE", "INT", "LONG" }) private ElementType elementType; @@ -58,7 +66,7 @@ public class MultivalueDedupeBenchmark { public void setup() { this.block = switch (elementType) { case BOOLEAN -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List<Boolean> values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -77,7 +85,7 @@ public void setup() { yield builder.build(); } case BYTES_REF -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List<BytesRef> values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -96,7 +104,7 @@ public void setup() { yield builder.build(); } case DOUBLE -> { - DoubleBlock.Builder builder = DoubleBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List<Double> values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -115,7 +123,7 @@ public void setup() { yield builder.build(); } case INT -> { - IntBlock.Builder builder = IntBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + IntBlock.Builder builder = blockFactory.newIntBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List<Integer> values = new ArrayList<>(); for (int i = 0; i < size; i++) { @@ -134,7 +142,7 @@ public void setup() { yield builder.build(); } case LONG -> { - LongBlock.Builder builder = LongBlock.newBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); + LongBlock.Builder builder = blockFactory.newLongBlockBuilder(AggregatorBenchmark.BLOCK_LENGTH * (size + repeats)); for (int p = 0; p < AggregatorBenchmark.BLOCK_LENGTH; p++) { List<Long> values = new ArrayList<>(); for (long i = 0; i < size; i++) { @@ -159,18 +167,18 @@ public void setup() { @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void adaptive() { - MultivalueDedupe.dedupeToBlockAdaptive(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockAdaptive(block, blockFactory).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void copyAndSort() { - MultivalueDedupe.dedupeToBlockUsingCopyAndSort(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockUsingCopyAndSort(block, blockFactory).close(); } @Benchmark @OperationsPerInvocation(AggregatorBenchmark.BLOCK_LENGTH) public void copyMissing() { - MultivalueDedupe.dedupeToBlockUsingCopyMissing(block, BlockFactory.getNonBreakingInstance()).close(); + MultivalueDedupe.dedupeToBlockUsingCopyMissing(block, blockFactory).close(); } }
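Note: for orientation, the three strategies benchmarked above (adaptive, copyAndSort, copyMissing) differ in how a position's repeated values are collapsed before grouping. Below is a minimal sketch of the copy-and-sort idea on a plain long[], assuming nothing about the real MultivalueDedupe internals; the class and method names are illustrative only.

import java.util.Arrays;

class CopyAndSortDedupeSketch {
    // Copy the values so the source stays untouched, sort so duplicates become
    // adjacent, then compact each run of equal values down to its first element.
    static long[] dedupe(long[] values) {
        long[] copy = Arrays.copyOf(values, values.length);
        Arrays.sort(copy);
        int unique = 0;
        for (int i = 0; i < copy.length; i++) {
            if (i == 0 || copy[i] != copy[i - 1]) {
                copy[unique++] = copy[i];
            }
        }
        return Arrays.copyOf(copy, unique);
    }

    public static void main(String[] args) {
        // Prints [1, 2, 3]
        System.out.println(Arrays.toString(dedupe(new long[] { 3, 1, 3, 2, 1 })));
    }
}

diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java 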
b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java index d723ea3e1a6b3..3d5a36ea288b4 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/TopNBenchmark.java @@ -10,16 +10,15 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Operator; import org.elasticsearch.compute.operator.topn.TopNEncoder; @@ -51,6 +50,12 @@ @State(Scope.Thread) @Fork(1) public class TopNBenchmark { + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays? + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + private static final int BLOCK_LENGTH = 8 * 1024; private static final String LONGS = "longs"; @@ -110,7 +115,7 @@ private static Operator operator(String data, int topCount) { ClusterSettings.createBuiltInClusterSettings() ); return new TopNOperator( - BlockFactory.getNonBreakingInstance(), + blockFactory, breakerService.getBreaker(CircuitBreaker.REQUEST), topCount, elementTypes, @@ -137,35 +142,35 @@ private static Page page(String data) { private static Block block(String data) { return switch (data) { case LONGS -> { - var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendLong(i); } yield builder.build(); } case INTS -> { - var builder = IntBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newIntBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendInt(i); } yield builder.build(); } case DOUBLES -> { - var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH); + var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendDouble(i); } yield builder.build(); } case BOOLEANS -> { - BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(BLOCK_LENGTH); + BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendBoolean(i % 2 == 1); } yield builder.build(); } case BYTES_REFS -> { - BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(BLOCK_LENGTH); + BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(BLOCK_LENGTH); for (int i = 0; i < BLOCK_LENGTH; i++) { builder.appendBytesRef(new BytesRef(Integer.toString(i))); }
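Note: the TopNOperator constructed above keeps only the best topCount rows under circuit-breaker accounting. As a rough illustration of the underlying top-N-by-bounded-heap idea only (not the real operator, which also tracks sort keys and encoders), a minimal sketch:

import java.util.PriorityQueue;

class TopNSketch {
    // Keep a min-heap of at most topCount elements; the weakest kept value sits
    // on top and is evicted whenever a better candidate arrives.
    static long[] topN(long[] values, int topCount) {
        PriorityQueue<Long> heap = new PriorityQueue<>(topCount);
        for (long v : values) {
            if (heap.size() < topCount) {
                heap.offer(v);
            } else if (heap.peek() < v) {
                heap.poll();
                heap.offer(v);
            }
        }
        return heap.stream().mapToLong(Long::longValue).sorted().toArray();
    }

    public static void main(String[] args) {
        // Prints [7, 9]
        System.out.println(java.util.Arrays.toString(topN(new long[] { 5, 1, 9, 3, 7 }, 2)));
    }
}

diff --git a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java index 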
afe8377d3e58c..66389c9e11ded 100644 --- a/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java +++ b/benchmarks/src/main/java/org/elasticsearch/benchmark/compute/operator/ValuesSourceReaderBenchmark.java @@ -22,7 +22,9 @@ import org.apache.lucene.store.Directory; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.NumericUtils; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.lucene.Lucene; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; @@ -78,6 +80,11 @@ public class ValuesSourceReaderBenchmark { private static final int BLOCK_LENGTH = 16 * 1024; private static final int INDEX_SIZE = 10 * BLOCK_LENGTH; private static final int COMMIT_INTERVAL = 500; + private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; + private static final BlockFactory blockFactory = BlockFactory.getInstance( + new NoopCircuitBreaker("noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); static { // Smoke test all the expected values and force loading subclasses more like prod @@ -241,7 +248,7 @@ private static BlockLoader numericBlockLoader(String name, Where where, NumberFi @OperationsPerInvocation(INDEX_SIZE) public void benchmark() { ValuesSourceReaderOperator op = new ValuesSourceReaderOperator( - BlockFactory.getNonBreakingInstance(), + blockFactory, fields(name), List.of(new ValuesSourceReaderOperator.ShardContext(reader, () -> { throw new UnsupportedOperationException("can't load _source here"); @@ -374,7 +381,7 @@ private void setupPages() { pages = new ArrayList<>(); switch (layout) { case "in_order" -> { - IntVector.Builder docs = IntVector.newVectorBuilder(BLOCK_LENGTH); + IntVector.Builder docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); for (LeafReaderContext ctx : reader.leaves()) { int begin = 0; while (begin < ctx.reader().maxDoc()) { @@ -385,14 +392,14 @@ private void setupPages() { pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, end - begin).asVector(), - IntBlock.newConstantBlockWith(ctx.ord, end - begin).asVector(), + blockFactory.newConstantIntBlockWith(0, end - begin).asVector(), + blockFactory.newConstantIntBlockWith(ctx.ord, end - begin).asVector(), docs.build(), true ).asBlock() ) ); - docs = IntVector.newVectorBuilder(BLOCK_LENGTH); + docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); begin = end; } } @@ -403,8 +410,8 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} for (LeafReaderContext ctx : reader.leaves()) { docItrs.add(new ItrAndOrd(IntStream.range(0, ctx.reader().maxDoc()).iterator(), ctx.ord)); } - IntVector.Builder docs = IntVector.newVectorBuilder(BLOCK_LENGTH); - IntVector.Builder leafs = IntVector.newVectorBuilder(BLOCK_LENGTH); + IntVector.Builder docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); + IntVector.Builder leafs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); int size = 0; while (docItrs.isEmpty() == false) { Iterator<ItrAndOrd> itrItr = docItrs.iterator(); while (itrItr.hasNext()) { @@ -420,12 +427,11 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} if (size >= BLOCK_LENGTH) { pages.add( new Page( - new DocVector(IntBlock.newConstantBlockWith(0, size).asVector(), leafs.build(), docs.build(), null) - .asBlock() + new DocVector(blockFactory.newConstantIntVector(0, size), leafs.build(), docs.build(), null).asBlock() ) ); - docs = 
IntVector.newVectorBuilder(BLOCK_LENGTH); - leafs = IntVector.newVectorBuilder(BLOCK_LENGTH); + docs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); + leafs = blockFactory.newIntVectorBuilder(BLOCK_LENGTH); size = 0; } } @@ -434,7 +440,7 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, size).asVector(), + blockFactory.newConstantIntBlockWith(0, size).asVector(), leafs.build().asBlock().asVector(), docs.build(), null @@ -460,9 +466,9 @@ record ItrAndOrd(PrimitiveIterator.OfInt itr, int ord) {} pages.add( new Page( new DocVector( - IntBlock.newConstantBlockWith(0, 1).asVector(), - IntBlock.newConstantBlockWith(next.ord, 1).asVector(), - IntBlock.newConstantBlockWith(next.itr.nextInt(), 1).asVector(), + blockFactory.newConstantIntVector(0, 1), + blockFactory.newConstantIntVector(next.ord, 1), + blockFactory.newConstantIntVector(next.itr.nextInt(), 1), true ).asBlock() ) diff --git a/build-conventions/build.gradle b/build-conventions/build.gradle index 0065492a735b4..cd9a548a9901f 100644 --- a/build-conventions/build.gradle +++ b/build-conventions/build.gradle @@ -92,3 +92,7 @@ project.getPlugins().withType(JavaBasePlugin.class) { } } } + +tasks.withType(JavaCompile).configureEach { + options.incremental = System.getenv("JENKINS_URL") == null && System.getenv("BUILDKITE_BUILD_URL") == null && System.getProperty("isCI") == null +} diff --git a/build-tools-internal/build.gradle b/build-tools-internal/build.gradle index 738b3f62803ab..a3b41283764a1 100644 --- a/build-tools-internal/build.gradle +++ b/build-tools-internal/build.gradle @@ -229,6 +229,7 @@ sourceSets { tasks.withType(JavaCompile).configureEach { options.encoding = 'UTF-8' + options.incremental = System.getenv("JENKINS_URL") == null && System.getenv("BUILDKITE_BUILD_URL") == null && System.getProperty("isCI") == null } tasks.named('licenseHeaders').configure { @@ -294,6 +295,8 @@ dependencies { compileOnly buildLibs.checkstyle compileOnly buildLibs.reflections + implementation 'com.github.javaparser:javaparser-core:3.18.0' + runtimeOnly "org.elasticsearch.gradle:reaper:$version" testImplementation buildLibs.checkstyle testImplementation buildLibs.wiremock diff --git a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle index f691d4bd996a7..aaae18401685a 100644 --- a/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle +++ b/build-tools-internal/src/main/groovy/elasticsearch.fips.gradle @@ -79,6 +79,7 @@ if (BuildParams.inFipsJvm) { // with no x-pack. 
Tests having security explicitly enabled/disabled will override this setting setting 'xpack.security.enabled', 'false' setting 'xpack.security.fips_mode.enabled', 'true' + setting 'xpack.security.fips_mode.required_providers', '["BCFIPS", "BCJSSE"]' setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.authc.password_hashing.algorithm', 'pbkdf2_stretch' keystorePassword 'keystore-password' diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java index d71c893cdd20f..3d6d37575eca9 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/BwcSetupExtension.java @@ -109,6 +109,10 @@ private TaskProvider<LoggedExec> createRunBwcGradleTask( loggedExec.args("-Dorg.elasticsearch.build.cache.url=" + buildCacheUrl); } + if (System.getProperty("isCI") != null) { + loggedExec.args("-DisCI"); + } + loggedExec.args("-Dbuild.snapshot=true", "-Dscan.tag.NESTED"); final LogLevel logLevel = project.getGradle().getStartParameter().getLogLevel(); List<LogLevel> nonDefaultLogLevels = Arrays.asList(LogLevel.QUIET, LogLevel.WARN, LogLevel.INFO, LogLevel.DEBUG); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java index bad3ebb11a0dd..f0604ab33ceec 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchBuildCompletePlugin.java @@ -134,7 +134,7 @@ public void execute(BuildFinishedFlowAction.Parameters parameters) throws FileNo } uploadFile.getParentFile().mkdirs(); createBuildArchiveTar(parameters.getFilteredFiles().get(), parameters.getProjectDir().get(), uploadFile); - if (uploadFile.exists() && System.getenv("BUILDKITE").equals("true")) { + if (uploadFile.exists() && "true".equals(System.getenv("BUILDKITE"))) { String uploadFilePath = "build/" + uploadFile.getName(); try { System.out.println("Uploading buildkite artifact: " + uploadFilePath + "..."); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java index 4f9a7284c83e1..4a695e93ebdfe 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchJavaBasePlugin.java @@ -132,6 +132,7 @@ public static void configureCompile(Project project) { compileTask.getConventionMapping().map("sourceCompatibility", () -> java.getSourceCompatibility().toString()); compileTask.getConventionMapping().map("targetCompatibility", () -> java.getTargetCompatibility().toString()); compileOptions.getRelease().set(releaseVersionProviderFromCompileTask(project, compileTask)); + compileOptions.setIncremental(BuildParams.isCi() == false); }); // also apply release flag to groovy, which is used in build-tools project.getTasks().withType(GroovyCompile.class).configureEach(compileTask -> {
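Note: the ElasticsearchBuildCompletePlugin hunk above is a null-safety fix. System.getenv("BUILDKITE") returns null when the variable is unset (any build outside Buildkite), so the old receiver order could throw. A minimal standalone demonstration; the class name is illustrative:

public class NullSafeEqualsDemo {
    public static void main(String[] args) {
        // getenv returns null when the variable is unset, e.g. on a local build.
        String buildkite = System.getenv("BUILDKITE");
        // Constant-first comparison never throws, even when buildkite is null.
        System.out.println("true".equals(buildkite)); // prints false when unset
        // The old order, buildkite.equals("true"), would throw NullPointerException here.
    }
}

diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java 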
b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java index f1804064b7e07..31b62c4ac700f 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/ElasticsearchTestBasePlugin.java @@ -108,10 +108,7 @@ public void execute(Task t) { "--add-opens=java.base/java.nio.file=ALL-UNNAMED", "--add-opens=java.base/java.time=ALL-UNNAMED", "--add-opens=java.management/java.lang.management=ALL-UNNAMED", - "-XX:+HeapDumpOnOutOfMemoryError", - // REMOVE once bumped to a JDK greater than 21.0.1, https://github.com/elastic/elasticsearch/issues/103004 - "-XX:CompileCommand=exclude,org.apache.lucene.util.MSBRadixSorter::computeCommonPrefixLengthAndBuildHistogram", - "-XX:CompileCommand=exclude,org.apache.lucene.util.RadixSelector::computeCommonPrefixLengthAndBuildHistogram" + "-XX:+HeapDumpOnOutOfMemoryError" ); test.getJvmArgumentProviders().add(new SimpleCommandLineArgumentProvider("-XX:HeapDumpPath=" + heapdumpDir)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java index 115c4b0694141..5e62790a9d78a 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/info/GlobalBuildInfoPlugin.java @@ -124,7 +124,9 @@ public void apply(Project project) { params.setGitOrigin(gitInfo.getOrigin()); params.setBuildDate(ZonedDateTime.now(ZoneOffset.UTC)); params.setTestSeed(getTestSeed()); - params.setIsCi(System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null); + params.setIsCi( + System.getenv("JENKINS_URL") != null || System.getenv("BUILDKITE_BUILD_URL") != null || System.getProperty("isCI") != null + ); params.setDefaultParallel(ParallelDetector.findDefaultParallel(project)); params.setInFipsJvm(Util.getBooleanProperty("tests.fips.enabled", false)); params.setIsSnapshotBuild(Util.getBooleanProperty("build.snapshot", true)); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java index a01b1c28a851f..bfc52adcdecfd 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheCacheableTestFixtures.java @@ -25,7 +25,9 @@ import java.io.File; import java.io.IOException; +import java.lang.reflect.Constructor; import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.MalformedURLException; import java.net.URL; import java.net.URLClassLoader; @@ -74,9 +76,14 @@ public void execute() { Set> classes = (Set>) reflections.getSubTypesOf(ifClass); for (Class cacheableTestFixtureClazz : classes) { - Object o = cacheableTestFixtureClazz.getDeclaredConstructor().newInstance(); - Method cacheMethod = cacheableTestFixtureClazz.getMethod("cache"); - cacheMethod.invoke(o); + if (Modifier.isAbstract(cacheableTestFixtureClazz.getModifiers()) == false) { + Constructor declaredConstructor = cacheableTestFixtureClazz.getDeclaredConstructor(); + declaredConstructor.setAccessible(true); + Object o = 
declaredConstructor.newInstance(); + Method cacheMethod = cacheableTestFixtureClazz.getMethod("cache"); + System.out.println("Caching resources from " + cacheableTestFixtureClazz.getName()); + cacheMethod.invoke(o); + } } } catch (Exception e) { throw new RuntimeException(e); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java index f8ab8eef1004c..c8ce9d5ca2c71 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/packer/CacheTestFixtureResourcesPlugin.java @@ -12,6 +12,7 @@ import org.elasticsearch.gradle.internal.ResolveAllDependencies; import org.gradle.api.Plugin; import org.gradle.api.Project; +import org.gradle.api.artifacts.Dependency; import org.gradle.api.artifacts.dsl.DependencyHandler; import org.gradle.api.plugins.JavaPlugin; import org.gradle.api.plugins.JavaPluginExtension; @@ -26,9 +27,12 @@ public void apply(Project project) { var cacheTestFixturesConfiguration = project.getConfigurations().create(CACHE_TEST_FIXTURES); cacheTestFixturesConfiguration.defaultDependencies(deps -> { DependencyHandler dependencyHandler = project.getDependencies(); - deps.add(dependencyHandler.create("org.reflections:reflections:" + VersionProperties.getVersions().get("reflections"))); - deps.add(dependencyHandler.create("org.javassist:javassist:" + VersionProperties.getVersions().get("javassist"))); + Dependency reflections = dependencyHandler.create( + "org.reflections:reflections:" + VersionProperties.getVersions().get("reflections") + ); + deps.add(reflections); }); + project.getPlugins().withType(JavaPlugin.class, javaPlugin -> { var cacheTestFixtures = project.getTasks().register(CACHE_TEST_FIXTURES, CacheCacheableTestFixtures.class, (t) -> { var testSourceSet = project.getExtensions() diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java index f9f831439f2ca..6c978edd48c29 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/ReleaseToolsPlugin.java @@ -47,6 +47,9 @@ public void apply(Project project) { final Version version = VersionProperties.getElasticsearchVersion(); + project.getTasks() + .register("updateVersions", UpdateVersionsTask.class, t -> project.getTasks().named("spotlessApply").get().mustRunAfter(t)); + final FileTree yamlFiles = projectDirectory.dir("docs/changelog") .getAsFileTree() .matching(new PatternSet().include("**/*.yml", "**/*.yaml")); diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java new file mode 100644 index 0000000000000..f8073f384b871 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTask.java @@ -0,0 +1,215 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.gradle.internal.release; + +import com.github.javaparser.StaticJavaParser; +import com.github.javaparser.ast.CompilationUnit; +import com.github.javaparser.ast.NodeList; +import com.github.javaparser.ast.body.ClassOrInterfaceDeclaration; +import com.github.javaparser.ast.body.FieldDeclaration; +import com.github.javaparser.ast.body.VariableDeclarator; +import com.github.javaparser.ast.expr.NameExpr; +import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter; +import com.google.common.annotations.VisibleForTesting; + +import org.elasticsearch.gradle.Version; +import org.gradle.api.DefaultTask; +import org.gradle.api.logging.Logger; +import org.gradle.api.logging.Logging; +import org.gradle.api.tasks.TaskAction; +import org.gradle.api.tasks.options.Option; +import org.gradle.initialization.layout.BuildLayout; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardOpenOption; +import java.util.Map; +import java.util.NavigableMap; +import java.util.Objects; +import java.util.Optional; +import java.util.TreeMap; +import java.util.regex.Matcher; +import java.util.regex.Pattern; +import java.util.stream.Collectors; + +import javax.annotation.Nullable; +import javax.inject.Inject; + +public class UpdateVersionsTask extends DefaultTask { + private static final Logger LOGGER = Logging.getLogger(UpdateVersionsTask.class); + + static final String SERVER_MODULE_PATH = "server/src/main/java/"; + static final String VERSION_FILE_PATH = SERVER_MODULE_PATH + "org/elasticsearch/Version.java"; + + static final Pattern VERSION_FIELD = Pattern.compile("V_(\\d+)_(\\d+)_(\\d+)(?:_(\\w+))?"); + + final Path rootDir; + + @Nullable + private Version addVersion; + private boolean setCurrent; + @Nullable + private Version removeVersion; + + @Inject + public UpdateVersionsTask(BuildLayout layout) { + rootDir = layout.getRootDirectory().toPath(); + } + + @Option(option = "add-version", description = "Specifies the version to add") + public void addVersion(String version) { + this.addVersion = Version.fromString(version); + } + + @Option(option = "set-current", description = "Set the 'current' constant to the new version") + public void setCurrent(boolean setCurrent) { + this.setCurrent = setCurrent; + } + + @Option(option = "remove-version", description = "Specifies the version to remove") + public void removeVersion(String version) { + this.removeVersion = Version.fromString(version); + } + + static String toVersionField(Version version) { + return String.format("V_%d_%d_%d", version.getMajor(), version.getMinor(), version.getRevision()); + } + + static Optional parseVersionField(CharSequence field) { + Matcher m = VERSION_FIELD.matcher(field); + if (m.find() == false) return Optional.empty(); + + return Optional.of( + new Version(Integer.parseInt(m.group(1)), Integer.parseInt(m.group(2)), Integer.parseInt(m.group(3)), m.group(4)) + ); + } + + @TaskAction + public void executeTask() throws IOException { + if (addVersion == null && removeVersion == null) { + throw new IllegalArgumentException("No versions to add or remove specified"); + } + if (setCurrent && addVersion == null) { + throw new IllegalArgumentException("No new version added to set as the current version"); + } + 
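For reference, the @Option registrations above map directly to Gradle command-line flags, so (assuming the task is invoked from the repository root) typical invocations of the updateVersions task registered in ReleaseToolsPlugin would look like:

    ./gradlew updateVersions --add-version=8.11.4 --set-current
    ./gradlew updateVersions --remove-version=8.11.4

(Version numbers here are illustrative.) Each generated constant encodes its version as major, two-digit minor, two-digit revision and a 99 suffix, so 8.11.4 becomes new Version(8_11_04_99), matching the %d_%02d_%02d_99 format used in addVersionConstant below.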
if (Objects.equals(addVersion, removeVersion)) { + throw new IllegalArgumentException("Same version specified to add and remove"); + } + + Path versionJava = rootDir.resolve(VERSION_FILE_PATH); + CompilationUnit file = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionJava)); + + Optional modifiedFile = Optional.empty(); + if (addVersion != null) { + LOGGER.lifecycle("Adding new version [{}] to [{}]", addVersion, versionJava); + var added = addVersionConstant(modifiedFile.orElse(file), addVersion, setCurrent); + if (added.isPresent()) { + modifiedFile = added; + } + } + if (removeVersion != null) { + LOGGER.lifecycle("Removing version [{}] from [{}]", removeVersion, versionJava); + var removed = removeVersionConstant(modifiedFile.orElse(file), removeVersion); + if (removed.isPresent()) { + modifiedFile = removed; + } + } + + if (modifiedFile.isPresent()) { + writeOutNewContents(versionJava, modifiedFile.get()); + } + } + + @VisibleForTesting + static Optional addVersionConstant(CompilationUnit versionJava, Version version, boolean updateCurrent) { + String newFieldName = toVersionField(version); + + ClassOrInterfaceDeclaration versionClass = versionJava.getClassByName("Version").get(); + if (versionClass.getFieldByName(newFieldName).isPresent()) { + LOGGER.lifecycle("New version constant [{}] already present, skipping", newFieldName); + return Optional.empty(); + } + + NavigableMap versions = versionClass.getFields() + .stream() + .map(f -> Map.entry(f, parseVersionField(f.getVariable(0).getNameAsString()))) + .filter(e -> e.getValue().isPresent()) + .collect(Collectors.toMap(e -> e.getValue().get(), Map.Entry::getKey, (v1, v2) -> { + throw new IllegalArgumentException("Duplicate version constants " + v1); + }, TreeMap::new)); + + // find the version this should be inserted after + var previousVersion = versions.lowerEntry(version); + if (previousVersion == null) { + throw new IllegalStateException(String.format("Could not find previous version to [%s]", version)); + } + FieldDeclaration newVersion = createNewVersionConstant( + previousVersion.getValue(), + newFieldName, + String.format("%d_%02d_%02d_99", version.getMajor(), version.getMinor(), version.getRevision()) + ); + versionClass.getMembers().addAfter(newVersion, previousVersion.getValue()); + + if (updateCurrent) { + versionClass.getFieldByName("CURRENT") + .orElseThrow(() -> new IllegalArgumentException("Could not find CURRENT constant")) + .getVariable(0) + .setInitializer(new NameExpr(newFieldName)); + } + + return Optional.of(versionJava); + } + + private static FieldDeclaration createNewVersionConstant(FieldDeclaration lastVersion, String newName, String newExpr) { + return new FieldDeclaration( + new NodeList<>(lastVersion.getModifiers()), + new VariableDeclarator( + lastVersion.getCommonType(), + newName, + StaticJavaParser.parseExpression(String.format("new Version(%s)", newExpr)) + ) + ); + } + + @VisibleForTesting + static Optional removeVersionConstant(CompilationUnit versionJava, Version version) { + String removeFieldName = toVersionField(version); + + ClassOrInterfaceDeclaration versionClass = versionJava.getClassByName("Version").get(); + var declaration = versionClass.getFieldByName(removeFieldName); + if (declaration.isEmpty()) { + LOGGER.lifecycle("Version constant [{}] not found, skipping", removeFieldName); + return Optional.empty(); + } + + // check if this is referenced by CURRENT + String currentReference = versionClass.getFieldByName("CURRENT") + .orElseThrow(() -> new 
IllegalArgumentException("Could not find CURRENT constant")) + .getVariable(0) + .getInitializer() + .get() + .asNameExpr() + .getNameAsString(); + if (currentReference.equals(removeFieldName)) { + throw new IllegalArgumentException(String.format("Cannot remove version [%s], it is referenced by CURRENT", version)); + } + + declaration.get().remove(); + + return Optional.of(versionJava); + } + + static void writeOutNewContents(Path file, CompilationUnit unit) throws IOException { + if (unit.containsData(LexicalPreservingPrinter.NODE_TEXT_DATA) == false) { + throw new IllegalArgumentException("CompilationUnit has no lexical information for output"); + } + Files.writeString(file, LexicalPreservingPrinter.print(unit), StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING); + } +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureTask.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureTask.java new file mode 100644 index 0000000000000..da7bcfa289808 --- /dev/null +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixtureTask.java @@ -0,0 +1,18 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ +package org.elasticsearch.gradle.internal.testfixtures; + +import org.gradle.api.DefaultTask; +import org.gradle.api.file.DirectoryProperty; +import org.gradle.api.tasks.Internal; + +public abstract class TestFixtureTask extends DefaultTask { + + @Internal + abstract DirectoryProperty getFixturesDir(); +} diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java index 89e8747ee814d..c50ff97498c31 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/testfixtures/TestFixturesPlugin.java @@ -81,37 +81,36 @@ public void apply(Project project) { ); ExtraPropertiesExtension ext = project.getExtensions().getByType(ExtraPropertiesExtension.class); - File testfixturesDir = project.file("testfixtures_shared"); - ext.set("testFixturesDir", testfixturesDir); + File testFixturesDir = project.file("testfixtures_shared"); + ext.set("testFixturesDir", testFixturesDir); if (project.file(DOCKER_COMPOSE_YML).exists()) { project.getPluginManager().apply(BasePlugin.class); project.getPluginManager().apply(DockerComposePlugin.class); - - TaskProvider preProcessFixture = project.getTasks().register("preProcessFixture", t -> { - t.doFirst(new Action() { - @Override - public void execute(Task task) { - try { - Files.createDirectories(testfixturesDir.toPath()); - } catch (IOException e) { - throw new UncheckedIOException(e); - } + TaskProvider preProcessFixture = project.getTasks().register("preProcessFixture", TestFixtureTask.class, t -> { + t.getFixturesDir().set(testFixturesDir); + t.doFirst(task -> { + try { + Files.createDirectories(testFixturesDir.toPath()); + } catch (IOException e) { + throw new UncheckedIOException(e); } }); }); TaskProvider buildFixture = 
project.getTasks() .register("buildFixture", t -> t.dependsOn(preProcessFixture, tasks.named("composeUp"))); - TaskProvider postProcessFixture = project.getTasks().register("postProcessFixture", task -> { - task.dependsOn(buildFixture); - configureServiceInfoForTask( - task, - project, - false, - (name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) - ); - }); + TaskProvider postProcessFixture = project.getTasks() + .register("postProcessFixture", TestFixtureTask.class, task -> { + task.getFixturesDir().set(testFixturesDir); + task.dependsOn(buildFixture); + configureServiceInfoForTask( + task, + project, + false, + (name, port) -> task.getExtensions().getByType(ExtraPropertiesExtension.class).set(name, port) + ); + }); maybeSkipTask(dockerSupport, preProcessFixture); maybeSkipTask(dockerSupport, postProcessFixture); @@ -138,7 +137,7 @@ public void execute(Task task) { t.mustRunAfter(preProcessFixture); }); tasks.named("composePull").configure(t -> t.mustRunAfter(preProcessFixture)); - tasks.named("composeDown").configure(t -> t.doLast(t2 -> getFileSystemOperations().delete(d -> d.delete(testfixturesDir)))); + tasks.named("composeDown").configure(t -> t.doLast(t2 -> getFileSystemOperations().delete(d -> d.delete(testFixturesDir)))); } else { project.afterEvaluate(spec -> { if (extension.fixtures.isEmpty()) { @@ -179,7 +178,7 @@ private void maybeSkipTasks(TaskContainer tasks, Provider tasks.withType(taskClass).configureEach(t -> maybeSkipTask(dockerSupport, t)); } - private void maybeSkipTask(Provider dockerSupport, TaskProvider task) { + private void maybeSkipTask(Provider dockerSupport, TaskProvider task) { task.configure(t -> maybeSkipTask(dockerSupport, t)); } diff --git a/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java new file mode 100644 index 0000000000000..97441990d47c2 --- /dev/null +++ b/build-tools-internal/src/test/java/org/elasticsearch/gradle/internal/release/UpdateVersionsTaskTests.java @@ -0,0 +1,244 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.gradle.internal.release; + +import com.github.javaparser.StaticJavaParser; +import com.github.javaparser.ast.CompilationUnit; +import com.github.javaparser.ast.Node; +import com.github.javaparser.ast.body.FieldDeclaration; +import com.github.javaparser.printer.lexicalpreservation.LexicalPreservingPrinter; + +import org.elasticsearch.gradle.Version; +import org.junit.Test; + +import java.io.StringWriter; +import java.nio.file.Path; +import java.util.List; +import java.util.Optional; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasSize; +import static org.hamcrest.Matchers.hasToString; +import static org.hamcrest.Matchers.is; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertThrows; + +public class UpdateVersionsTaskTests { + + @Test + public void addVersion_versionExists() { + final String versionJava = """ + public class Version { + public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + var newUnit = UpdateVersionsTask.addVersionConstant(unit, Version.fromString("8.10.1"), false); + assertThat(newUnit.isPresent(), is(false)); + } + + @Test + public void addVersion_oldVersion() { + final String versionJava = """ + public class Version { + public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + final String updatedVersionJava = """ + public class Version { + + public static final Version V_8_10_0 = new Version(8_10_00_99); + + public static final Version V_8_10_1 = new Version(8_10_01_99); + + public static final Version V_8_10_2 = new Version(8_10_02_99); + + public static final Version V_8_11_0 = new Version(8_11_00_99); + + public static final Version CURRENT = V_8_11_0; + } + """; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + UpdateVersionsTask.addVersionConstant(unit, Version.fromString("8.10.2"), false); + + assertThat(unit, hasToString(updatedVersionJava)); + } + + @Test + public void addVersion_newVersion_current() { + final String versionJava = """ + public class Version { + public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + final String updatedVersionJava = """ + public class Version { + + public static final Version V_8_10_0 = new Version(8_10_00_99); + + public static final Version V_8_10_1 = new Version(8_10_01_99); + + public static final Version V_8_11_0 = new Version(8_11_00_99); + + public static final Version V_8_11_1 = new Version(8_11_01_99); + + public static final Version CURRENT = V_8_11_1; + } + """; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + UpdateVersionsTask.addVersionConstant(unit, Version.fromString("8.11.1"), true); + + assertThat(unit, hasToString(updatedVersionJava)); + } + + @Test + public void removeVersion_versionDoesntExist() { + final String versionJava = """ + public class Version { + public 
static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + var newUnit = UpdateVersionsTask.removeVersionConstant(unit, Version.fromString("8.10.2")); + assertThat(newUnit.isPresent(), is(false)); + } + + @Test + public void removeVersion_versionIsCurrent() { + final String versionJava = """ + public class Version { + public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + var ex = assertThrows( + IllegalArgumentException.class, + () -> UpdateVersionsTask.removeVersionConstant(unit, Version.fromString("8.11.0")) + ); + assertThat(ex.getMessage(), equalTo("Cannot remove version [8.11.0], it is referenced by CURRENT")); + } + + @Test + public void removeVersion() { + final String versionJava = """ + public class Version { + public static final Version V_8_10_0 = new Version(8_10_00_99); + public static final Version V_8_10_1 = new Version(8_10_01_99); + public static final Version V_8_11_0 = new Version(8_11_00_99); + public static final Version CURRENT = V_8_11_0; + }"""; + final String updatedVersionJava = """ + public class Version { + + public static final Version V_8_10_0 = new Version(8_10_00_99); + + public static final Version V_8_11_0 = new Version(8_11_00_99); + + public static final Version CURRENT = V_8_11_0; + } + """; + + CompilationUnit unit = StaticJavaParser.parse(versionJava); + + UpdateVersionsTask.removeVersionConstant(unit, Version.fromString("8.10.1")); + + assertThat(unit, hasToString(updatedVersionJava)); + } + + @Test + public void updateVersionFile_addsCorrectly() throws Exception { + Version newVersion = new Version(50, 10, 20); + String versionField = UpdateVersionsTask.toVersionField(newVersion); + + Path versionFile = Path.of("..", UpdateVersionsTask.VERSION_FILE_PATH); + CompilationUnit unit = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionFile)); + assertFalse("Test version already exists in the file", findFirstField(unit, versionField).isPresent()); + + List existingFields = unit.findAll(FieldDeclaration.class); + + var result = UpdateVersionsTask.addVersionConstant(unit, newVersion, true); + assertThat(result.isPresent(), is(true)); + + // write out & parse back in again + StringWriter writer = new StringWriter(); + LexicalPreservingPrinter.print(unit, writer); + unit = StaticJavaParser.parse(writer.toString()); + + // a field has been added + assertThat(unit.findAll(FieldDeclaration.class), hasSize(existingFields.size() + 1)); + // the field has the right name + var field = findFirstField(unit, versionField); + assertThat(field.isPresent(), is(true)); + // the field has the right constant + assertThat( + field.get().getVariable(0).getInitializer().get(), + hasToString( + String.format("new Version(%d_%02d_%02d_99)", newVersion.getMajor(), newVersion.getMinor(), newVersion.getRevision()) + ) + ); + // and CURRENT has been updated + var current = findFirstField(unit, "CURRENT"); + assertThat(current.get().getVariable(0).getInitializer().get(), hasToString(versionField)); + } + + @Test + public void 
updateVersionFile_removesCorrectly() throws Exception { + Path versionFile = Path.of("..", UpdateVersionsTask.VERSION_FILE_PATH); + CompilationUnit unit = LexicalPreservingPrinter.setup(StaticJavaParser.parse(versionFile)); + + List existingFields = unit.findAll(FieldDeclaration.class); + + var staticVersionFields = unit.findAll( + FieldDeclaration.class, + f -> f.isStatic() && f.getVariable(0).getTypeAsString().equals("Version") + ); + // remove the last-but-two static version field (skip CURRENT and the latest version) + String constant = staticVersionFields.get(staticVersionFields.size() - 3).getVariable(0).getNameAsString(); + + Version versionToRemove = UpdateVersionsTask.parseVersionField(constant).orElseThrow(AssertionError::new); + var result = UpdateVersionsTask.removeVersionConstant(unit, versionToRemove); + assertThat(result.isPresent(), is(true)); + + // write out & parse back in again + StringWriter writer = new StringWriter(); + LexicalPreservingPrinter.print(unit, writer); + unit = StaticJavaParser.parse(writer.toString()); + + // a field has been removed + assertThat(unit.findAll(FieldDeclaration.class), hasSize(existingFields.size() - 1)); + // the removed field does not exist + var field = findFirstField(unit, constant); + assertThat(field.isPresent(), is(false)); + } + + private static Optional findFirstField(Node node, String name) { + return node.findFirst(FieldDeclaration.class, f -> f.getVariable(0).getName().getIdentifier().equals(name)); + } +} diff --git a/build-tools-internal/version.properties b/build-tools-internal/version.properties index c34bdc95046b3..54bc80e0c08c2 100644 --- a/build-tools-internal/version.properties +++ b/build-tools-internal/version.properties @@ -1,5 +1,5 @@ elasticsearch = 8.13.0 -lucene = 9.9.0-snapshot-bb4fec631e6 +lucene = 9.9.1 bundled_jdk_vendor = openjdk bundled_jdk = 21.0.1+12@415e3f918a1f4062a0074a2794853d0d @@ -48,8 +48,7 @@ ductTape = 1.0.8 commonsCompress = 1.24.0 # packer caching build logic -reflections = 0.9.12 -javassist = 3.28.0-GA +reflections = 0.10.2 # benchmark dependencies jmh = 1.26 diff --git a/build-tools/build.gradle b/build-tools/build.gradle index 3fe2639bfe2a4..eb5573ac03e0e 100644 --- a/build-tools/build.gradle +++ b/build-tools/build.gradle @@ -158,6 +158,10 @@ dependencies { } +tasks.withType(JavaCompile).configureEach { + options.incremental = System.getenv("JENKINS_URL") == null && System.getenv("BUILDKITE_BUILD_URL") == null && System.getProperty("isCI") == null +} + tasks.named('test').configure { useJUnitPlatform() } diff --git a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java index baefb15e6373a..193a4ca818035 100644 --- a/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java +++ b/client/client-benchmark-noop-api-plugin/src/main/java/org/elasticsearch/plugin/noop/action/search/TransportNoopSearchAction.java @@ -21,7 +21,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.tasks.Task; @@ -45,15 +44,13 @@ public 
TransportNoopSearchAction(TransportService transportService, ActionFilter protected void doExecute(Task task, SearchRequest request, ActionListener listener) { listener.onResponse( new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), - InternalAggregations.EMPTY, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), + InternalAggregations.EMPTY, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index 9e26582d58439..c5e905f461f45 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -58,10 +58,6 @@ # result in less optimal vector performance 20-:--add-modules=jdk.incubator.vector -# REMOVE once bumped to a JDK greater than 21.0.1, https://github.com/elastic/elasticsearch/issues/103004 -19-21:-XX:CompileCommand=exclude,org.apache.lucene.util.MSBRadixSorter::computeCommonPrefixLengthAndBuildHistogram -19-21:-XX:CompileCommand=exclude,org.apache.lucene.util.RadixSelector::computeCommonPrefixLengthAndBuildHistogram - ## heap dumps # generate a heap dump when an allocation from the Java heap fails; heap dumps diff --git a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/PluginsConfig.java b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/PluginsConfig.java index 8edd5f701706c..168e5ba3806f3 100644 --- a/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/PluginsConfig.java +++ b/distribution/tools/plugin-cli/src/main/java/org/elasticsearch/plugins/cli/PluginsConfig.java @@ -160,12 +160,11 @@ static PluginsConfig parseConfig(Path configPath, XContent xContent) throws IOEx parser.declareStringOrNull(PluginsConfig::setProxy, new ParseField("proxy")); parser.declareObjectArrayOrNull(PluginsConfig::setPlugins, descriptorParser, new ParseField("plugins")); - final XContentParser yamlXContentParser = xContent.createParser( - XContentParserConfiguration.EMPTY, - Files.newInputStream(configPath) - ); - - return parser.parse(yamlXContentParser, null); + try ( + XContentParser yamlXContentParser = xContent.createParser(XContentParserConfiguration.EMPTY, Files.newInputStream(configPath)) + ) { + return parser.parse(yamlXContentParser, null); + } } /** diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java index 29650e4b74114..d312fae4456f1 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/JvmOptionsParser.java @@ -10,6 +10,7 @@ import org.elasticsearch.bootstrap.ServerArgs; import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.ProcessInfo; import org.elasticsearch.cli.UserException; import java.io.BufferedReader; @@ -39,7 +40,7 @@ /** * Parses JVM options from a file and prints a single line with all JVM options to standard output. 
*/ -final class JvmOptionsParser { +public final class JvmOptionsParser { static class JvmOptionsFileParserException extends Exception { @@ -59,7 +60,6 @@ SortedMap invalidLines() { this.jvmOptionsFile = jvmOptionsFile; this.invalidLines = invalidLines; } - } /** @@ -70,25 +70,27 @@ SortedMap invalidLines() { * variable. * * @param args the start-up arguments - * @param configDir the ES config dir + * @param processInfo information about the CLI process. * @param tmpDir the directory that should be passed to {@code -Djava.io.tmpdir} - * @param envOptions the options passed through the ES_JAVA_OPTS env var * @return the list of options to put on the Java command line * @throws InterruptedException if the java subprocess is interrupted * @throws IOException if there is a problem reading any of the files * @throws UserException if there is a problem parsing the `jvm.options` file or `jvm.options.d` files */ - static List determineJvmOptions(ServerArgs args, Path configDir, Path tmpDir, String envOptions) throws InterruptedException, + public static List determineJvmOptions(ServerArgs args, ProcessInfo processInfo, Path tmpDir) throws InterruptedException, IOException, UserException { - final JvmOptionsParser parser = new JvmOptionsParser(); final Map substitutions = new HashMap<>(); substitutions.put("ES_TMPDIR", tmpDir.toString()); - substitutions.put("ES_PATH_CONF", configDir.toString()); + substitutions.put("ES_PATH_CONF", args.configDir().toString()); + + final String envOptions = processInfo.envVars().get("ES_JAVA_OPTS"); try { - return parser.jvmOptions(args, configDir, tmpDir, envOptions, substitutions); + return Collections.unmodifiableList( + parser.jvmOptions(args, args.configDir(), tmpDir, envOptions, substitutions, processInfo.sysprops()) + ); } catch (final JvmOptionsFileParserException e) { final String errorMessage = String.format( Locale.ROOT, @@ -122,7 +124,8 @@ private List jvmOptions( final Path config, Path tmpDir, final String esJavaOpts, - final Map substitutions + final Map substitutions, + final Map cliSysprops ) throws InterruptedException, IOException, JvmOptionsFileParserException, UserException { final List jvmOptions = readJvmOptionsFiles(config); @@ -137,7 +140,7 @@ private List jvmOptions( ); substitutedJvmOptions.addAll(machineDependentHeap.determineHeapSettings(config, substitutedJvmOptions)); final List ergonomicJvmOptions = JvmErgonomics.choose(substitutedJvmOptions); - final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(args.nodeSettings()); + final List systemJvmOptions = SystemJvmOptions.systemJvmOptions(args.nodeSettings(), cliSysprops); final List apmOptions = APMJvmOptions.apmJvmOptions(args.nodeSettings(), args.secrets(), args.logsDir(), tmpDir); diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/MachineDependentHeap.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/MachineDependentHeap.java index a30f3115be5c9..87c4883ca3073 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/MachineDependentHeap.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/MachineDependentHeap.java @@ -86,8 +86,7 @@ static class NodeRoleParser { @SuppressWarnings("unchecked") public static MachineNodeRole parse(InputStream config) { final Settings settings; - try { - var parser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, config); + try (var parser = 
YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, config)) { if (parser.currentToken() == null && parser.nextToken() == null) { settings = null; } else { diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java index ea2df72fb2c0b..aac5f718081b4 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerCli.java @@ -243,8 +243,15 @@ protected Command loadTool(String toolname, String libs) { } // protected to allow tests to override - protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws UserException { - return ServerProcess.start(terminal, processInfo, args); + protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws Exception { + var tempDir = ServerProcessUtils.setupTempDir(processInfo); + var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir); + var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) + .withProcessInfo(processInfo) + .withServerArgs(args) + .withTempDir(tempDir) + .withJvmOptions(jvmOptions); + return serverProcessBuilder.start(); } // protected to allow tests to override diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java index d4b4d57977f5d..3972095a3a5c0 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcess.java @@ -9,34 +9,17 @@ package org.elasticsearch.server.cli; import org.elasticsearch.bootstrap.BootstrapInfo; -import org.elasticsearch.bootstrap.ServerArgs; -import org.elasticsearch.cli.ExitCodes; -import org.elasticsearch.cli.ProcessInfo; -import org.elasticsearch.cli.Terminal; -import org.elasticsearch.cli.UserException; -import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.PathUtils; -import org.elasticsearch.core.SuppressForbidden; import java.io.IOException; import java.io.OutputStream; -import java.io.UncheckedIOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.nio.file.Paths; -import java.nio.file.attribute.FileAttribute; -import java.util.ArrayList; -import java.util.HashMap; -import java.util.List; -import java.util.Map; import static org.elasticsearch.server.cli.ProcessUtil.nonInterruptible; /** * A helper to control a {@link Process} running the main Elasticsearch server. * - *
<p>
The process can be started by calling {@link #start(Terminal, ProcessInfo, ServerArgs)}. + *
<p>
The process can be started by calling {@link ServerProcessBuilder#start()}. * The process is controlled by internally sending arguments and control signals on stdin, * and receiving control signals on stderr. The start method does not return until the * server is ready to process requests and has exited the bootstrap thread. @@ -64,68 +47,6 @@ public class ServerProcess { this.errorPump = errorPump; } - // this allows mocking the process building by tests - interface OptionsBuilder { - List getJvmOptions(ServerArgs args, Path configDir, Path tmpDir, String envOptions) throws InterruptedException, - IOException, UserException; - } - - // this allows mocking the process building by tests - interface ProcessStarter { - Process start(ProcessBuilder pb) throws IOException; - } - - /** - * Start a server in a new process. - * - * @param terminal A terminal to connect the standard inputs and outputs to for the new process. - * @param processInfo Info about the current process, for passing through to the subprocess. - * @param args Arguments to the server process. - * @return A running server process that is ready for requests - * @throws UserException If the process failed during bootstrap - */ - public static ServerProcess start(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws UserException { - return start(terminal, processInfo, args, JvmOptionsParser::determineJvmOptions, ProcessBuilder::start); - } - - // package private so tests can mock options building and process starting - static ServerProcess start( - Terminal terminal, - ProcessInfo processInfo, - ServerArgs args, - OptionsBuilder optionsBuilder, - ProcessStarter processStarter - ) throws UserException { - Process jvmProcess = null; - ErrorPumpThread errorPump; - - boolean success = false; - try { - jvmProcess = createProcess(args, processInfo, args.configDir(), optionsBuilder, processStarter); - errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); - errorPump.start(); - sendArgs(args, jvmProcess.getOutputStream()); - - String errorMsg = errorPump.waitUntilReady(); - if (errorMsg != null) { - // something bad happened, wait for the process to exit then rethrow - int exitCode = jvmProcess.waitFor(); - throw new UserException(exitCode, errorMsg); - } - success = true; - } catch (InterruptedException e) { - throw new RuntimeException(e); - } catch (IOException e) { - throw new UncheckedIOException(e); - } finally { - if (success == false && jvmProcess != null && jvmProcess.isAlive()) { - jvmProcess.destroyForcibly(); - } - } - - return new ServerProcess(jvmProcess, errorPump); - } - /** * Return the process id of the server. */ @@ -169,19 +90,6 @@ public synchronized void stop() { waitFor(); // ignore exit code, we are already shutting down } - private static void sendArgs(ServerArgs args, OutputStream processStdin) { - // DO NOT close the underlying process stdin, since we need to be able to write to it to signal exit - var out = new OutputStreamStreamOutput(processStdin); - try { - args.writeTo(out); - out.flush(); - } catch (IOException ignore) { - // A failure to write here means the process has problems, and it will die anyway. We let this fall through - // so the pump thread can complete, writing out the actual error. All we get here is the failure to write to - // the process pipe, which isn't helpful to print. 
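With the static factory removed here, ServerProcess creation now goes through the new ServerProcessBuilder added below. A minimal sketch of the replacement flow, assembled from the ServerCli.startServer change earlier in this diff (variable names are illustrative):

    // Sketch: equivalent of the removed ServerProcess.start(terminal, processInfo, args)
    var tempDir = ServerProcessUtils.setupTempDir(processInfo);
    var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir);
    ServerProcess server = new ServerProcessBuilder()
        .withTerminal(terminal)
        .withProcessInfo(processInfo)
        .withServerArgs(args)
        .withTempDir(tempDir)
        .withJvmOptions(jvmOptions)
        .start();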
- } - } - private void sendShutdownMarker() { try { OutputStream os = jvmProcess.getOutputStream(); @@ -191,80 +99,4 @@ private void sendShutdownMarker() { // process is already effectively dead, fall through to wait for it, or should we SIGKILL? } } - - private static Process createProcess( - ServerArgs args, - ProcessInfo processInfo, - Path configDir, - OptionsBuilder optionsBuilder, - ProcessStarter processStarter - ) throws InterruptedException, IOException, UserException { - Map envVars = new HashMap<>(processInfo.envVars()); - Path tempDir = setupTempDir(processInfo, envVars.remove("ES_TMPDIR")); - if (envVars.containsKey("LIBFFI_TMPDIR") == false) { - envVars.put("LIBFFI_TMPDIR", tempDir.toString()); - } - - List jvmOptions = optionsBuilder.getJvmOptions(args, configDir, tempDir, envVars.remove("ES_JAVA_OPTS")); - // also pass through distribution type - jvmOptions.add("-Des.distribution.type=" + processInfo.sysprops().get("es.distribution.type")); - - Path esHome = processInfo.workingDir(); - Path javaHome = PathUtils.get(processInfo.sysprops().get("java.home")); - List command = new ArrayList<>(); - boolean isWindows = processInfo.sysprops().get("os.name").startsWith("Windows"); - command.add(javaHome.resolve("bin").resolve("java" + (isWindows ? ".exe" : "")).toString()); - command.addAll(jvmOptions); - command.add("--module-path"); - command.add(esHome.resolve("lib").toString()); - // Special circumstances require some modules (not depended on by the main server module) to be explicitly added: - command.add("--add-modules=jdk.net"); // needed to reflectively set extended socket options - // we control the module path, which may have additional modules not required by server - command.add("--add-modules=ALL-MODULE-PATH"); - command.add("-m"); - command.add("org.elasticsearch.server/org.elasticsearch.bootstrap.Elasticsearch"); - - var builder = new ProcessBuilder(command); - builder.environment().putAll(envVars); - builder.redirectOutput(ProcessBuilder.Redirect.INHERIT); - - return processStarter.start(builder); - } - - /** - * Returns the java.io.tmpdir Elasticsearch should use, creating it if necessary. - * - *
<p>
On non-Windows OS, this will be created as a subdirectory of the default temporary directory. - * Note that this causes the created temporary directory to be a private temporary directory. - */ - private static Path setupTempDir(ProcessInfo processInfo, String tmpDirOverride) throws UserException, IOException { - final Path path; - if (tmpDirOverride != null) { - path = Paths.get(tmpDirOverride); - if (Files.exists(path) == false) { - throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] does not exist or is not accessible"); - } - if (Files.isDirectory(path) == false) { - throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] is not a directory"); - } - } else { - if (processInfo.sysprops().get("os.name").startsWith("Windows")) { - /* - * On Windows, we avoid creating a unique temporary directory per invocation lest - * we pollute the temporary directory. On other operating systems, temporary directories - * will be cleaned automatically via various mechanisms (e.g., systemd, or restarts). - */ - path = Paths.get(processInfo.sysprops().get("java.io.tmpdir"), "elasticsearch"); - Files.createDirectories(path); - } else { - path = createTempDirectory("elasticsearch-"); - } - } - return path; - } - - @SuppressForbidden(reason = "Files#createTempDirectory(String, FileAttribute...)") - private static Path createTempDirectory(final String prefix, final FileAttribute... attrs) throws IOException { - return Files.createTempDirectory(prefix, attrs); - } } diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java new file mode 100644 index 0000000000000..4ef1e2bfd4737 --- /dev/null +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessBuilder.java @@ -0,0 +1,208 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.server.cli; + +import org.elasticsearch.bootstrap.ServerArgs; +import org.elasticsearch.cli.ProcessInfo; +import org.elasticsearch.cli.Terminal; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; +import org.elasticsearch.core.PathUtils; + +import java.io.IOException; +import java.io.OutputStream; +import java.io.UncheckedIOException; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.stream.Stream; + +/** + * This class is used to create a {@link ServerProcess}. + * Each ServerProcessBuilder instance manages a collection of process attributes. The {@link ServerProcessBuilder#start()} method creates + * a new {@link ServerProcess} instance with those attributes. 
+ * + * Each process builder manages these process attributes: + * - a temporary directory + * - process info to pass through to the new Java subprocess + * - the command line arguments to run Elasticsearch + * - a list of JVM options to be passed to the Elasticsearch Java process + * - a {@link Terminal} to read input and write output from/to the cli console + */ +public class ServerProcessBuilder { + private Path tempDir; + private ServerArgs serverArgs; + private ProcessInfo processInfo; + private List jvmOptions; + private Terminal terminal; + + // this allows mocking the process building by tests + interface ProcessStarter { + Process start(ProcessBuilder pb) throws IOException; + } + + /** + * Specifies the temporary directory to be used by the server process + */ + public ServerProcessBuilder withTempDir(Path tempDir) { + this.tempDir = tempDir; + return this; + } + + /** + * Specifies the process info to pass through to the new Java subprocess + */ + public ServerProcessBuilder withProcessInfo(ProcessInfo processInfo) { + this.processInfo = processInfo; + return this; + } + + /** + * Specifies the command line arguments to run Elasticsearch + */ + public ServerProcessBuilder withServerArgs(ServerArgs serverArgs) { + this.serverArgs = serverArgs; + return this; + } + + /** + * Specifies the JVM options to be passed to the Elasticsearch Java process + */ + public ServerProcessBuilder withJvmOptions(List jvmOptions) { + this.jvmOptions = jvmOptions; + return this; + } + + /** + * Specifies the {@link Terminal} to use for reading input and writing output from/to the cli console + */ + public ServerProcessBuilder withTerminal(Terminal terminal) { + this.terminal = terminal; + return this; + } + + private Map getEnvironment() { + Map envVars = new HashMap<>(processInfo.envVars()); + + envVars.remove("ES_TMPDIR"); + if (envVars.containsKey("LIBFFI_TMPDIR") == false) { + envVars.put("LIBFFI_TMPDIR", tempDir.toString()); + } + envVars.remove("ES_JAVA_OPTS"); + + return envVars; + } + + private List getJvmArgs() { + Path esHome = processInfo.workingDir(); + return List.of( + "--module-path", + esHome.resolve("lib").toString(), + // Special circumstances require some modules (not depended on by the main server module) to be explicitly added: + "--add-modules=jdk.net", // needed to reflectively set extended socket options + // we control the module path, which may have additional modules not required by server + "--add-modules=ALL-MODULE-PATH", + "-m", + "org.elasticsearch.server/org.elasticsearch.bootstrap.Elasticsearch" + ); + } + + private String getCommand() { + Path javaHome = PathUtils.get(processInfo.sysprops().get("java.home")); + + boolean isWindows = processInfo.sysprops().get("os.name").startsWith("Windows"); + return javaHome.resolve("bin").resolve("java" + (isWindows ? ".exe" : "")).toString(); + } + + /** + * Start a server in a new process. 
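All five attributes are required; per the checkRequiredArgument calls further down, calling start() on an incompletely configured builder fails fast rather than launching a broken process. A hypothetical misuse:

    // tempDir (among others) was never set, so this throws:
    // IllegalStateException: 'tempDir' is a required argument and needs to be specified before calling start()
    new ServerProcessBuilder().withTerminal(terminal).start();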
+ * + * @return A running server process that is ready for requests + * @throws UserException If the process failed during bootstrap + */ + public ServerProcess start() throws UserException { + return start(ProcessBuilder::start); + } + + private static void checkRequiredArgument(Object argument, String argumentName) { + if (argument == null) { + throw new IllegalStateException( + Strings.format("'%s' is a required argument and needs to be specified before calling start()", argumentName) + ); + } + } + + // package private for testing + ServerProcess start(ProcessStarter processStarter) throws UserException { + checkRequiredArgument(tempDir, "tempDir"); + checkRequiredArgument(serverArgs, "serverArgs"); + checkRequiredArgument(processInfo, "processInfo"); + checkRequiredArgument(jvmOptions, "jvmOptions"); + checkRequiredArgument(terminal, "terminal"); + + Process jvmProcess = null; + ErrorPumpThread errorPump; + + boolean success = false; + try { + jvmProcess = createProcess(getCommand(), getJvmArgs(), jvmOptions, getEnvironment(), processStarter); + errorPump = new ErrorPumpThread(terminal.getErrorWriter(), jvmProcess.getErrorStream()); + errorPump.start(); + sendArgs(serverArgs, jvmProcess.getOutputStream()); + + String errorMsg = errorPump.waitUntilReady(); + if (errorMsg != null) { + // something bad happened, wait for the process to exit then rethrow + int exitCode = jvmProcess.waitFor(); + throw new UserException(exitCode, errorMsg); + } + success = true; + } catch (InterruptedException e) { + throw new RuntimeException(e); + } catch (IOException e) { + throw new UncheckedIOException(e); + } finally { + if (success == false && jvmProcess != null && jvmProcess.isAlive()) { + jvmProcess.destroyForcibly(); + } + } + + return new ServerProcess(jvmProcess, errorPump); + } + + private static Process createProcess( + String command, + List jvmArgs, + List jvmOptions, + Map environment, + ProcessStarter processStarter + ) throws InterruptedException, IOException { + + var builder = new ProcessBuilder(Stream.concat(Stream.of(command), Stream.concat(jvmOptions.stream(), jvmArgs.stream())).toList()); + builder.environment().putAll(environment); + builder.redirectOutput(ProcessBuilder.Redirect.INHERIT); + + return processStarter.start(builder); + } + + private static void sendArgs(ServerArgs args, OutputStream processStdin) { + // DO NOT close the underlying process stdin, since we need to be able to write to it to signal exit + var out = new OutputStreamStreamOutput(processStdin); + try { + args.writeTo(out); + out.flush(); + } catch (IOException ignore) { + // A failure to write here means the process has problems, and it will die anyway. We let this fall through + // so the pump thread can complete, writing out the actual error. All we get here is the failure to write to + // the process pipe, which isn't helpful to print. + } + } +} diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessUtils.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessUtils.java new file mode 100644 index 0000000000000..ebbc68b1be90b --- /dev/null +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/ServerProcessUtils.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.server.cli; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.ProcessInfo; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.core.SuppressForbidden; + +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.attribute.FileAttribute; + +public class ServerProcessUtils { + + /** + * Returns the java.io.tmpdir Elasticsearch should use, creating it if necessary. + * + *
<p>
On non-Windows OS, this will be created as a subdirectory of the default temporary directory. + * Note that this causes the created temporary directory to be a private temporary directory. + */ + public static Path setupTempDir(ProcessInfo processInfo) throws UserException { + final Path path; + String tmpDirOverride = processInfo.envVars().get("ES_TMPDIR"); + if (tmpDirOverride != null) { + path = Paths.get(tmpDirOverride); + if (Files.exists(path) == false) { + throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] does not exist or is not accessible"); + } + if (Files.isDirectory(path) == false) { + throw new UserException(ExitCodes.CONFIG, "Temporary directory [" + path + "] is not a directory"); + } + } else { + try { + if (processInfo.sysprops().get("os.name").startsWith("Windows")) { + /* + * On Windows, we avoid creating a unique temporary directory per invocation lest + * we pollute the temporary directory. On other operating systems, temporary directories + * will be cleaned automatically via various mechanisms (e.g., systemd, or restarts). + */ + path = Paths.get(processInfo.sysprops().get("java.io.tmpdir"), "elasticsearch"); + Files.createDirectories(path); + } else { + path = createTempDirectory("elasticsearch-"); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + return path; + } + + @SuppressForbidden(reason = "Files#createTempDirectory(String, FileAttribute...)") + private static Path createTempDirectory(final String prefix, final FileAttribute... attrs) throws IOException { + return Files.createTempDirectory(prefix, attrs); + } +} diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 6e250075f7747..4a8b3da4777a0 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -12,12 +12,13 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import java.util.List; +import java.util.Map; import java.util.stream.Collectors; import java.util.stream.Stream; final class SystemJvmOptions { - static List systemJvmOptions(Settings nodeSettings) { + static List systemJvmOptions(Settings nodeSettings, final Map sysprops) { return Stream.of( /* * Cache ttl in seconds for positive DNS lookups noting that this overrides the JDK security property networkaddress.cache.ttl; @@ -65,7 +66,9 @@ static List systemJvmOptions(Settings nodeSettings) { */ "--add-opens=java.base/java.io=org.elasticsearch.preallocate", maybeOverrideDockerCgroup(), - maybeSetActiveProcessorCount(nodeSettings) + maybeSetActiveProcessorCount(nodeSettings), + // Pass through distribution type + "-Des.distribution.type=" + sysprops.get("es.distribution.type") ).filter(e -> e.isEmpty() == false).collect(Collectors.toList()); } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java index 03856b1024992..101be4301b522 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/JvmOptionsParserTests.java @@ -53,7 +53,6 @@ public void testUnversionedOptions() throws IOException { 
try (StringReader sr = new StringReader("-Xms1g\n-Xmx1g"); BufferedReader br = new BufferedReader(sr)) { assertExpectedJvmOptions(randomIntBetween(8, Integer.MAX_VALUE), br, Arrays.asList("-Xms1g", "-Xmx1g")); } - } public void testSingleVersionOption() throws IOException { @@ -351,25 +350,30 @@ public void accept(final int lineNumber, final String line) { public void testNodeProcessorsActiveCount() { { - final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY); + final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY, Map.of()); assertThat(jvmOptions, not(hasItem(containsString("-XX:ActiveProcessorCount=")))); } { Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 1).build(); - final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings); + final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, Map.of()); assertThat(jvmOptions, hasItem("-XX:ActiveProcessorCount=1")); } { // check rounding Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 0.2).build(); - final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings); + final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(nodeSettings, Map.of()); assertThat(jvmOptions, hasItem("-XX:ActiveProcessorCount=1")); } { // check validation Settings nodeSettings = Settings.builder().put(EsExecutors.NODE_PROCESSORS_SETTING.getKey(), 10000).build(); - var e = expectThrows(IllegalArgumentException.class, () -> SystemJvmOptions.systemJvmOptions(nodeSettings)); + var e = expectThrows(IllegalArgumentException.class, () -> SystemJvmOptions.systemJvmOptions(nodeSettings, Map.of())); assertThat(e.getMessage(), containsString("setting [node.processors] must be <=")); } } + + public void testCommandLineDistributionType() { + final List<String> jvmOptions = SystemJvmOptions.systemJvmOptions(Settings.EMPTY, Map.of("es.distribution.type", "testdistro")); + assertThat(jvmOptions, hasItem("-Des.distribution.type=testdistro")); + } } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java index da2c0104dd08e..e469764590bd6 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerCliTests.java @@ -314,6 +314,21 @@ public void testIgnoreNullExceptionOutput() throws Exception { assertThat(terminal.getErrorOutput(), not(containsString("null"))); } + public void testOptionsBuildingInterrupted() throws IOException { + Command command = new TestServerCli() { + @Override + protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) throws Exception { + throw new InterruptedException("interrupted while getting jvm options"); + } + }; + var e = expectThrows( + InterruptedException.class, + () -> command.main(new String[0], terminal, new ProcessInfo(sysprops, envVars, esHomeDir)) + ); + assertThat(e.getMessage(), equalTo("interrupted while getting jvm options")); + command.close(); + } + public void testServerExitsNonZero() throws Exception { mockServerExitCode = 140; int exitCode = executeMain(); @@ -480,63 +495,65 @@ void reset() { } } - @Override - protected Command newCommand() { - return new ServerCli() { - @Override - protected Command loadTool(String toolname, String libs) { - if
(toolname.equals("auto-configure-node")) { - assertThat(libs, equalTo("modules/x-pack-core,modules/x-pack-security,lib/tools/security-cli")); - return AUTO_CONFIG_CLI; - } else if (toolname.equals("sync-plugins")) { - assertThat(libs, equalTo("lib/tools/plugin-cli")); - return SYNC_PLUGINS_CLI; - } - throw new AssertionError("Unknown tool: " + toolname); + private class TestServerCli extends ServerCli { + @Override + protected Command loadTool(String toolname, String libs) { + if (toolname.equals("auto-configure-node")) { + assertThat(libs, equalTo("modules/x-pack-core,modules/x-pack-security,lib/tools/security-cli")); + return AUTO_CONFIG_CLI; + } else if (toolname.equals("sync-plugins")) { + assertThat(libs, equalTo("lib/tools/plugin-cli")); + return SYNC_PLUGINS_CLI; } + throw new AssertionError("Unknown tool: " + toolname); + } - @Override - Environment autoConfigureSecurity( - Terminal terminal, - OptionSet options, - ProcessInfo processInfo, - Environment env, - SecureString keystorePassword - ) throws Exception { - if (mockSecureSettingsLoader != null && mockSecureSettingsLoader.supportsSecurityAutoConfiguration() == false) { - fail("We shouldn't be calling auto configure on loaders that don't support it"); - } - return super.autoConfigureSecurity(terminal, options, processInfo, env, keystorePassword); + @Override + Environment autoConfigureSecurity( + Terminal terminal, + OptionSet options, + ProcessInfo processInfo, + Environment env, + SecureString keystorePassword + ) throws Exception { + if (mockSecureSettingsLoader != null && mockSecureSettingsLoader.supportsSecurityAutoConfiguration() == false) { + fail("We shouldn't be calling auto configure on loaders that don't support it"); } + return super.autoConfigureSecurity(terminal, options, processInfo, env, keystorePassword); + } - @Override - protected ServerProcess startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) { - if (argsValidator != null) { - argsValidator.accept(args); - } - mockServer.reset(); - return mockServer; + @Override + void syncPlugins(Terminal terminal, Environment env, ProcessInfo processInfo) throws Exception { + if (mockSecureSettingsLoader != null && mockSecureSettingsLoader instanceof MockSecureSettingsLoader mock) { + mock.verifiedEnv = true; + // equals as a pointer, environment shouldn't be changed if autoconfigure is not supported + assertFalse(mockSecureSettingsLoader.supportsSecurityAutoConfiguration()); + assertTrue(mock.environment == env); } - @Override - void syncPlugins(Terminal terminal, Environment env, ProcessInfo processInfo) throws Exception { - if (mockSecureSettingsLoader != null && mockSecureSettingsLoader instanceof MockSecureSettingsLoader mock) { - mock.verifiedEnv = true; - // equals as a pointer, environment shouldn't be changed if autoconfigure is not supported - assertFalse(mockSecureSettingsLoader.supportsSecurityAutoConfiguration()); - assertTrue(mock.environment == env); - } + super.syncPlugins(terminal, env, processInfo); + } - super.syncPlugins(terminal, env, processInfo); + @Override + protected SecureSettingsLoader secureSettingsLoader(Environment env) { + if (mockSecureSettingsLoader != null) { + return mockSecureSettingsLoader; } + return new KeystoreSecureSettingsLoader(); + } + } + + @Override + protected Command newCommand() { + return new TestServerCli() { @Override - protected SecureSettingsLoader secureSettingsLoader(Environment env) { - if (mockSecureSettingsLoader != null) { - return mockSecureSettingsLoader; + protected ServerProcess 
startServer(Terminal terminal, ProcessInfo processInfo, ServerArgs args) { + if (argsValidator != null) { + argsValidator.accept(args); } - - return new KeystoreSecureSettingsLoader(); + mockServer.reset(); + return mockServer; } }; } diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java index 57993d40391ac..fa36007b40af7 100644 --- a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.cli.ExitCodes; import org.elasticsearch.cli.MockTerminal; import org.elasticsearch.cli.ProcessInfo; -import org.elasticsearch.cli.UserException; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureSettings; @@ -34,7 +33,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; @@ -47,7 +45,6 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.server.cli.ProcessUtil.nonInterruptibleVoid; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; @@ -56,7 +53,6 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; import static org.hamcrest.Matchers.nullValue; -import static org.hamcrest.Matchers.startsWith; public class ServerProcessTests extends ESTestCase { @@ -66,7 +62,6 @@ public class ServerProcessTests extends ESTestCase { protected final Map<String, String> envVars = new HashMap<>(); Path esHomeDir; Settings.Builder nodeSettings; - ServerProcess.OptionsBuilder optionsBuilder; ProcessValidator processValidator; MainMethod mainCallback; MockElasticsearchProcess process; @@ -81,7 +76,7 @@ interface ProcessValidator { } int runForeground() throws Exception { - var server = startProcess(false, false, ""); + var server = startProcess(false, false); return server.waitFor(); } @@ -94,7 +89,6 @@ public void resetEnv() { envVars.clear(); esHomeDir = createTempDir(); nodeSettings = Settings.builder(); - optionsBuilder = (args, configDir, tmpDir, envOptions) -> new ArrayList<>(); processValidator = null; mainCallback = null; secrets = KeyStoreWrapper.create(); @@ -193,9 +187,12 @@ public Process destroyForcibly() { } } - ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePassword) throws Exception { - var pinfo = new ProcessInfo(Map.copyOf(sysprops), Map.copyOf(envVars), esHomeDir); - var args = new ServerArgs( + ProcessInfo createProcessInfo() { + return new ProcessInfo(Map.copyOf(sysprops), Map.copyOf(envVars), esHomeDir); + } + + ServerArgs createServerArgs(boolean daemonize, boolean quiet) { + return new ServerArgs( daemonize, quiet, null, @@ -204,14 +201,23 @@ ServerProcess startProcess(boolean daemonize, boolean quiet, String keystorePass esHomeDir.resolve("config"), esHomeDir.resolve("logs") ); - ServerProcess.ProcessStarter starter = pb -> { + } + + ServerProcess startProcess(boolean daemonize, boolean quiet) throws Exception { + var pinfo = createProcessInfo(); + ServerProcessBuilder.ProcessStarter starter = pb -> { if
(processValidator != null) { processValidator.validate(pb); } process = new MockElasticsearchProcess(); return process; }; - return ServerProcess.start(terminal, pinfo, args, optionsBuilder, starter); + var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) + .withProcessInfo(pinfo) + .withServerArgs(createServerArgs(daemonize, quiet)) + .withJvmOptions(List.of()) + .withTempDir(ServerProcessUtils.setupTempDir(pinfo)); + return serverProcessBuilder.start(starter); } public void testProcessBuilder() throws Exception { @@ -231,7 +237,7 @@ public void testProcessBuilder() throws Exception { } public void testPid() throws Exception { - var server = startProcess(true, false, ""); + var server = startProcess(true, false); assertThat(server.pid(), equalTo(12345L)); server.stop(); } @@ -246,18 +252,12 @@ public void testBootstrapError() throws Exception { assertThat(terminal.getErrorOutput(), containsString("a bootstrap exception")); } - public void testStartError() throws Exception { + public void testStartError() { processValidator = pb -> { throw new IOException("something went wrong"); }; - var e = expectThrows(UncheckedIOException.class, () -> runForeground()); + var e = expectThrows(UncheckedIOException.class, this::runForeground); assertThat(e.getCause().getMessage(), equalTo("something went wrong")); } - public void testOptionsBuildingInterrupted() throws Exception { - optionsBuilder = (args, configDir, tmpDir, envOptions) -> { throw new InterruptedException("interrupted while get jvm options"); }; - var e = expectThrows(RuntimeException.class, () -> runForeground()); - assertThat(e.getCause().getMessage(), equalTo("interrupted while get jvm options")); - } - public void testEnvPassthrough() throws Exception { envVars.put("MY_ENV", "foo"); processValidator = pb -> { assertThat(pb.environment(), hasEntry(equalTo("MY_ENV"), equalTo("foo"))); }; @@ -276,83 +276,48 @@ public void testLibffiEnv() throws Exception { runForeground(); } - public void testTempDir() throws Exception { - optionsBuilder = (args, configDir, tmpDir, envOptions) -> { - assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); - assertThat(tmpDir.getFileName().toString(), startsWith("elasticsearch-")); - return new ArrayList<>(); - }; - runForeground(); - } - - public void testTempDirWindows() throws Exception { - Path baseTmpDir = createTempDir(); - sysprops.put("os.name", "Windows 10"); - sysprops.put("java.io.tmpdir", baseTmpDir.toString()); - optionsBuilder = (args, configDir, tmpDir, envOptions) -> { - assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); - assertThat(tmpDir.getFileName().toString(), equalTo("elasticsearch")); - assertThat(tmpDir.getParent().toString(), equalTo(baseTmpDir.toString())); - return new ArrayList<>(); - }; - runForeground(); - } - - public void testTempDirOverride() throws Exception { + public void testEnvCleared() throws Exception { Path customTmpDir = createTempDir(); envVars.put("ES_TMPDIR", customTmpDir.toString()); - optionsBuilder = (args, configDir, tmpDir, envOptions) -> { - assertThat(tmpDir.toString(), equalTo(customTmpDir.toString())); - return new ArrayList<>(); - }; - processValidator = pb -> assertThat(pb.environment(), not(hasKey("ES_TMPDIR"))); - runForeground(); - } - - public void testTempDirOverrideMissing() throws Exception { - Path baseDir = createTempDir(); - envVars.put("ES_TMPDIR", baseDir.resolve("dne").toString()); - var e = expectThrows(UserException.class, () -> runForeground()); - assertThat(e.exitCode, 
equalTo(ExitCodes.CONFIG)); - assertThat(e.getMessage(), containsString("dne] does not exist")); - } - - public void testTempDirOverrideNotADirectory() throws Exception { - Path tmpFile = createTempFile(); - envVars.put("ES_TMPDIR", tmpFile.toString()); - var e = expectThrows(UserException.class, () -> runForeground()); - assertThat(e.exitCode, equalTo(ExitCodes.CONFIG)); - assertThat(e.getMessage(), containsString("is not a directory")); - } - - public void testCustomJvmOptions() throws Exception { envVars.put("ES_JAVA_OPTS", "-Dmyoption=foo"); - optionsBuilder = (args, configDir, tmpDir, envOptions) -> { - assertThat(envOptions, equalTo("-Dmyoption=foo")); - return new ArrayList<>(); + + processValidator = pb -> { + assertThat(pb.environment(), not(hasKey("ES_TMPDIR"))); + assertThat(pb.environment(), not(hasKey("ES_JAVA_OPTS"))); }; - processValidator = pb -> assertThat(pb.environment(), not(hasKey("ES_JAVA_OPTS"))); runForeground(); } public void testCommandLineSysprops() throws Exception { - optionsBuilder = (args, configDir, tmpDir, envOptions) -> List.of("-Dfoo1=bar", "-Dfoo2=baz"); - processValidator = pb -> { - assertThat(pb.command(), contains("-Dfoo1=bar")); - assertThat(pb.command(), contains("-Dfoo2=bar")); + ServerProcessBuilder.ProcessStarter starter = pb -> { + assertThat(pb.command(), hasItems("-Dfoo1=bar", "-Dfoo2=baz")); + process = new MockElasticsearchProcess(); + return process; }; + var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) + .withProcessInfo(createProcessInfo()) + .withServerArgs(createServerArgs(false, false)) + .withJvmOptions(List.of("-Dfoo1=bar", "-Dfoo2=baz")) + .withTempDir(Path.of(".")); + serverProcessBuilder.start(starter).waitFor(); + } + + public void testServerProcessBuilderMissingArgumentError() throws Exception { + ServerProcessBuilder.ProcessStarter starter = pb -> new MockElasticsearchProcess(); + var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) + .withProcessInfo(createProcessInfo()) + .withServerArgs(createServerArgs(false, false)) + .withTempDir(Path.of(".")); + var ex = expectThrows(IllegalStateException.class, () -> serverProcessBuilder.start(starter).waitFor()); + assertThat(ex.getMessage(), equalTo("'jvmOptions' is a required argument and needs to be specified before calling start()")); } public void testCommandLine() throws Exception { String mainClass = "org.elasticsearch.server/org.elasticsearch.bootstrap.Elasticsearch"; - String distroSysprop = "-Des.distribution.type=testdistro"; String modulePath = esHomeDir.resolve("lib").toString(); Path javaBin = Paths.get("javahome").resolve("bin"); - sysprops.put("es.distribution.type", "testdistro"); AtomicReference expectedJava = new AtomicReference<>(javaBin.resolve("java").toString()); - processValidator = pb -> { - assertThat(pb.command(), hasItems(expectedJava.get(), distroSysprop, "--module-path", modulePath, "-m", mainClass)); - }; + processValidator = pb -> { assertThat(pb.command(), hasItems(expectedJava.get(), "--module-path", modulePath, "-m", mainClass)); }; runForeground(); sysprops.put("os.name", "Windows 10"); @@ -370,7 +335,7 @@ public void testDetach() throws Exception { // will block until stdin closed manually after test assertThat(stdin.read(), equalTo(-1)); }; - var server = startProcess(true, false, ""); + var server = startProcess(true, false); server.detach(); assertThat(terminal.getErrorOutput(), containsString("final message")); server.stop(); // this should be a noop, and will fail the stdin read assert above 
if shutdown sent @@ -384,7 +349,7 @@ public void testStop() throws Exception { nonInterruptibleVoid(mainReady::await); stderr.println("final message"); }; - var server = startProcess(false, false, ""); + var server = startProcess(false, false); mainReady.countDown(); server.stop(); assertThat(process.main.isDone(), is(true)); // stop should have waited @@ -399,7 +364,7 @@ public void testWaitFor() throws Exception { assertThat(stdin.read(), equalTo((int) BootstrapInfo.SERVER_SHUTDOWN_MARKER)); stderr.println("final message"); }; - var server = startProcess(false, false, ""); + var server = startProcess(false, false); new Thread(() -> { // simulate stop run as shutdown hook in another thread, eg from Ctrl-C nonInterruptibleVoid(mainReady::await); @@ -420,7 +385,7 @@ public void testProcessDies() throws Exception { nonInterruptibleVoid(mainExit::await); exitCode.set(-9); }; - var server = startProcess(false, false, ""); + var server = startProcess(false, false); mainExit.countDown(); int exitCode = server.waitFor(); assertThat(exitCode, equalTo(-9)); diff --git a/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessUtilsTests.java b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessUtilsTests.java new file mode 100644 index 0000000000000..8cd1b63e41b03 --- /dev/null +++ b/distribution/tools/server-cli/src/test/java/org/elasticsearch/server/cli/ServerProcessUtilsTests.java @@ -0,0 +1,82 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.server.cli; + +import org.elasticsearch.cli.ExitCodes; +import org.elasticsearch.cli.ProcessInfo; +import org.elasticsearch.cli.UserException; +import org.elasticsearch.test.ESTestCase; +import org.junit.Before; + +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.HashMap; +import java.util.Map; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; + +public class ServerProcessUtilsTests extends ESTestCase { + + protected final Map<String, String> sysprops = new HashMap<>(); + protected final Map<String, String> envVars = new HashMap<>(); + + @Before + public void resetEnv() { + sysprops.clear(); + sysprops.put("os.name", "Linux"); + sysprops.put("java.home", "javahome"); + envVars.clear(); + } + + private ProcessInfo createProcessInfo() { + return new ProcessInfo(Map.copyOf(sysprops), Map.copyOf(envVars), Path.of(".")); + } + + public void testTempDir() throws Exception { + var tmpDir = ServerProcessUtils.setupTempDir(createProcessInfo()); + assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); + assertThat(tmpDir.getFileName().toString(), startsWith("elasticsearch-")); + } + + public void testTempDirWindows() throws Exception { + Path baseTmpDir = createTempDir(); + sysprops.put("os.name", "Windows 10"); + sysprops.put("java.io.tmpdir", baseTmpDir.toString()); + var tmpDir = ServerProcessUtils.setupTempDir(createProcessInfo()); + assertThat(tmpDir.toString(), Files.exists(tmpDir), is(true)); + assertThat(tmpDir.getFileName().toString(), equalTo("elasticsearch")); + assertThat(tmpDir.getParent().toString(), equalTo(baseTmpDir.toString())); + } + + public void testTempDirOverride() throws Exception { + Path customTmpDir = createTempDir(); + envVars.put("ES_TMPDIR", customTmpDir.toString()); + var tmpDir = ServerProcessUtils.setupTempDir(createProcessInfo()); + assertThat(tmpDir.toString(), equalTo(customTmpDir.toString())); + } + + public void testTempDirOverrideMissing() { + Path baseDir = createTempDir(); + envVars.put("ES_TMPDIR", baseDir.resolve("dne").toString()); + var e = expectThrows(UserException.class, () -> ServerProcessUtils.setupTempDir(createProcessInfo())); + assertThat(e.exitCode, equalTo(ExitCodes.CONFIG)); + assertThat(e.getMessage(), containsString("dne] does not exist")); + } + + public void testTempDirOverrideNotADirectory() throws Exception { + Path tmpFile = createTempFile(); + envVars.put("ES_TMPDIR", tmpFile.toString()); + var e = expectThrows(UserException.class, () -> ServerProcessUtils.setupTempDir(createProcessInfo())); + assertThat(e.exitCode, equalTo(ExitCodes.CONFIG)); + assertThat(e.getMessage(), containsString("is not a directory")); + } +} diff --git a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java index 858787b361654..2c42dcf5cb2f5 100644 --- a/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java +++ b/distribution/tools/windows-service-cli/src/main/java/org/elasticsearch/windows/service/WindowsServiceDaemon.java @@ -17,7 +17,10 @@ import org.elasticsearch.common.settings.KeyStoreWrapper; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.env.Environment; +import org.elasticsearch.server.cli.JvmOptionsParser; import
org.elasticsearch.server.cli.ServerProcess; +import org.elasticsearch.server.cli.ServerProcessBuilder; +import org.elasticsearch.server.cli.ServerProcessUtils; /** * Starts an Elasticsearch process, but does not wait for it to exit. @@ -38,7 +41,14 @@ public void execute(Terminal terminal, OptionSet options, Environment env, Proce // the Windows service daemon doesn't support secure settings implementations other than the keystore try (var loadedSecrets = KeyStoreWrapper.bootstrap(env.configFile(), () -> new SecureString(new char[0]))) { var args = new ServerArgs(false, true, null, loadedSecrets, env.settings(), env.configFile(), env.logsFile()); - this.server = ServerProcess.start(terminal, processInfo, args); + var tempDir = ServerProcessUtils.setupTempDir(processInfo); + var jvmOptions = JvmOptionsParser.determineJvmOptions(args, processInfo, tempDir); + var serverProcessBuilder = new ServerProcessBuilder().withTerminal(terminal) + .withProcessInfo(processInfo) + .withServerArgs(args) + .withTempDir(tempDir) + .withJvmOptions(jvmOptions); + this.server = serverProcessBuilder.start(); // start does not return until the server is ready, and we do not wait for the process } } diff --git a/docs/Versions.asciidoc b/docs/Versions.asciidoc index 3f44db9928434..420ee36359745 100644 --- a/docs/Versions.asciidoc +++ b/docs/Versions.asciidoc @@ -1,8 +1,8 @@ include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[] -:lucene_version: 9.9.0 -:lucene_version_path: 9_9_0 +:lucene_version: 9.9.1 +:lucene_version_path: 9_9_1 :jdk: 11.0.2 :jdk_major: 11 :build_type: tar diff --git a/docs/build.gradle b/docs/build.gradle index ddd2a38b5160b..b6f696f0aae6a 100644 --- a/docs/build.gradle +++ b/docs/build.gradle @@ -113,8 +113,9 @@ testClusters.matching { it.name == "yamlRestTest"}.configureEach { requiresFeature 'es.index_mode_feature_flag_registered', Version.fromString("8.0.0") requiresFeature 'es.failure_store_feature_flag_enabled', Version.fromString("8.12.0") - extraConfigFile 'op-jwks.json', project(':x-pack:test:idp-fixture').file("oidc/op-jwks.json") - extraConfigFile 'idp-docs-metadata.xml', project(':x-pack:test:idp-fixture').file("idp/shibboleth-idp/metadata/idp-docs-metadata.xml") + // TODO Rene: clean up this kind of cross project file references + extraConfigFile 'op-jwks.json', project(':x-pack:test:idp-fixture').file("src/main/resources/oidc/op-jwks.json") + extraConfigFile 'idp-docs-metadata.xml', project(':x-pack:test:idp-fixture').file("src/main/resources/idp/shibboleth-idp/metadata/idp-docs-metadata.xml") extraConfigFile 'testClient.crt', project(':x-pack:plugin:security').file("src/test/resources/org/elasticsearch/xpack/security/action/pki_delegation/testClient.crt") setting 'xpack.security.enabled', 'true' setting 'xpack.security.authc.api_key.enabled', 'true' diff --git a/docs/changelog/100740.yaml b/docs/changelog/100740.yaml new file mode 100644 index 0000000000000..c93fbf676ef81 --- /dev/null +++ b/docs/changelog/100740.yaml @@ -0,0 +1,6 @@ +pr: 100740 +summary: "ESQL: Referencing expressions that contain backticks requires <>." 
+area: ES|QL +type: enhancement +issues: + - 100312 diff --git a/docs/changelog/102557.yaml b/docs/changelog/102557.yaml new file mode 100644 index 0000000000000..dfca1763064d4 --- /dev/null +++ b/docs/changelog/102557.yaml @@ -0,0 +1,5 @@ +pr: 102557 +summary: Metrics for search latencies +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/102584.yaml b/docs/changelog/102584.yaml new file mode 100644 index 0000000000000..44ff5dd9f7461 --- /dev/null +++ b/docs/changelog/102584.yaml @@ -0,0 +1,5 @@ +pr: 102584 +summary: Expose some ML metrics via APM +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/102734.yaml b/docs/changelog/102734.yaml deleted file mode 100644 index c27846d7d8478..0000000000000 --- a/docs/changelog/102734.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 102734 -summary: Allow match field in enrich fields -area: ES|QL -type: bug -issues: [] diff --git a/docs/changelog/102741.yaml b/docs/changelog/102741.yaml deleted file mode 100644 index 84a4b8092632f..0000000000000 --- a/docs/changelog/102741.yaml +++ /dev/null @@ -1,6 +0,0 @@ -pr: 102741 -summary: "[ILM] More resilient when a policy is added to searchable snapshot" -area: ILM+SLM -type: bug -issues: - - 101958 diff --git a/docs/changelog/102765.yaml b/docs/changelog/102765.yaml new file mode 100644 index 0000000000000..eb73da2650542 --- /dev/null +++ b/docs/changelog/102765.yaml @@ -0,0 +1,5 @@ +pr: 102765 +summary: "Add APM metrics to `HealthPeriodicLogger`" +area: Health +type: enhancement +issues: [] diff --git a/docs/changelog/102798.yaml b/docs/changelog/102798.yaml new file mode 100644 index 0000000000000..986ad99f96a19 --- /dev/null +++ b/docs/changelog/102798.yaml @@ -0,0 +1,5 @@ +pr: 102798 +summary: Hot-reloadable remote cluster credentials +area: Security +type: enhancement +issues: [] diff --git a/docs/changelog/102862.yaml b/docs/changelog/102862.yaml new file mode 100644 index 0000000000000..bb453163009d5 --- /dev/null +++ b/docs/changelog/102862.yaml @@ -0,0 +1,5 @@ +pr: 102862 +summary: Add optional pruning configuration (weighted terms scoring) to text expansion query +area: "Machine Learning" +type: enhancement +issues: [] diff --git a/docs/changelog/102879.yaml b/docs/changelog/102879.yaml new file mode 100644 index 0000000000000..b35d36dd0a3a9 --- /dev/null +++ b/docs/changelog/102879.yaml @@ -0,0 +1,5 @@ +pr: 102879 +summary: Fix disk computation when initializing new shards +area: Allocation +type: bug +issues: [] diff --git a/docs/changelog/102958.yaml b/docs/changelog/102958.yaml deleted file mode 100644 index bb357c1eb09b5..0000000000000 --- a/docs/changelog/102958.yaml +++ /dev/null @@ -1,7 +0,0 @@ -pr: 102958 -summary: Ensure transform `_schedule_now` API only triggers the expected transform - task -area: Transform -type: bug -issues: - - 102956 diff --git a/docs/changelog/103031.yaml b/docs/changelog/103031.yaml deleted file mode 100644 index f63094139f5ca..0000000000000 --- a/docs/changelog/103031.yaml +++ /dev/null @@ -1,9 +0,0 @@ -pr: 103031 -summary: Collect warnings in compute service -area: ES|QL -type: bug -issues: - - 100163 - - 103028 - - 102871 - - 102982 diff --git a/docs/changelog/103032.yaml b/docs/changelog/103032.yaml new file mode 100644 index 0000000000000..81d84fca0bdb0 --- /dev/null +++ b/docs/changelog/103032.yaml @@ -0,0 +1,5 @@ +pr: 103032 +summary: "x-pack/plugin/apm-data: Map some APM fields as flattened and fix error.grouping_name script" +area: Data streams +type: enhancement +issues: [] diff --git 
a/docs/changelog/103047.yaml b/docs/changelog/103047.yaml deleted file mode 100644 index 59f86d679b55f..0000000000000 --- a/docs/changelog/103047.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103047 -summary: Ensure `dynamicMapping` updates are handled in insertion order -area: Mapping -type: bug -issues: [] diff --git a/docs/changelog/103087.yaml b/docs/changelog/103087.yaml deleted file mode 100644 index 5824bc53edb8d..0000000000000 --- a/docs/changelog/103087.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103087 -summary: Use latest version of entsearch ingestion pipeline -area: Application -type: bug -issues: [] diff --git a/docs/changelog/103099.yaml b/docs/changelog/103099.yaml new file mode 100644 index 0000000000000..c3fd3f9d7b8e4 --- /dev/null +++ b/docs/changelog/103099.yaml @@ -0,0 +1,6 @@ +pr: 103099 +summary: "ESQL: Simplify IS NULL/IS NOT NULL evaluation" +area: ES|QL +type: enhancement +issues: + - 103097 diff --git a/docs/changelog/103112.yaml b/docs/changelog/103112.yaml deleted file mode 100644 index dcb4cf604c179..0000000000000 --- a/docs/changelog/103112.yaml +++ /dev/null @@ -1,5 +0,0 @@ -pr: 103112 -summary: Add JIT compiler excludes for `computeCommonPrefixLengthAndBuildHistogram` -area: Search -type: bug -issues: [] diff --git a/docs/changelog/103150.yaml b/docs/changelog/103150.yaml new file mode 100644 index 0000000000000..3f42c882d89fb --- /dev/null +++ b/docs/changelog/103150.yaml @@ -0,0 +1,6 @@ +pr: 103150 +summary: "ES|QL: Fix NPE on single value detection" +area: ES|QL +type: bug +issues: + - 103141 diff --git a/docs/changelog/103176.yaml b/docs/changelog/103176.yaml new file mode 100644 index 0000000000000..a0f46c1462f62 --- /dev/null +++ b/docs/changelog/103176.yaml @@ -0,0 +1,5 @@ +pr: 103176 +summary: Validate settings in `ReloadSecureSettings` API +area: Client +type: bug +issues: [] diff --git a/docs/changelog/103190.yaml b/docs/changelog/103190.yaml new file mode 100644 index 0000000000000..5e6927d3eadd7 --- /dev/null +++ b/docs/changelog/103190.yaml @@ -0,0 +1,5 @@ +pr: 103190 +summary: ILM/SLM history policies forcemerge in hot and dsl configuration +area: ILM+SLM +type: enhancement +issues: [] diff --git a/docs/changelog/103212.yaml b/docs/changelog/103212.yaml new file mode 100644 index 0000000000000..3cbbddc8f2229 --- /dev/null +++ b/docs/changelog/103212.yaml @@ -0,0 +1,5 @@ +pr: 103212 +summary: Use the EQL query filter for the open-pit request +area: EQL +type: enhancement +issues: [] diff --git a/docs/changelog/103223.yaml b/docs/changelog/103223.yaml new file mode 100644 index 0000000000000..c2f4c1b6a2cf4 --- /dev/null +++ b/docs/changelog/103223.yaml @@ -0,0 +1,10 @@ +pr: 103223 +summary: "[Synonyms] Mark Synonyms as GA" +area: "Search" +type: feature +issues: [] +highlight: + title: "GA Release of Synonyms API" + body: |- + Removes the beta label for the Synonyms API to make it GA.
+ notable: true diff --git a/docs/changelog/103316.yaml b/docs/changelog/103316.yaml new file mode 100644 index 0000000000000..47eddcc34d924 --- /dev/null +++ b/docs/changelog/103316.yaml @@ -0,0 +1,5 @@ +pr: 103316 +summary: Review KEEP logic to prevent duplicate column names +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/103325.yaml b/docs/changelog/103325.yaml new file mode 100644 index 0000000000000..7de6c41986490 --- /dev/null +++ b/docs/changelog/103325.yaml @@ -0,0 +1,6 @@ +pr: 103325 +summary: Added Duplicate Word Check Feature to Analysis Nori +area: Search +type: feature +issues: + - 103321 diff --git a/docs/changelog/103339.yaml b/docs/changelog/103339.yaml new file mode 100644 index 0000000000000..6ea1ab0cf799a --- /dev/null +++ b/docs/changelog/103339.yaml @@ -0,0 +1,6 @@ +pr: 103339 +summary: "ESQL: Fix resolution of MV_EXPAND after KEEP *" +area: ES|QL +type: bug +issues: + - 103331 diff --git a/docs/changelog/103340.yaml b/docs/changelog/103340.yaml new file mode 100644 index 0000000000000..21280dbfc857d --- /dev/null +++ b/docs/changelog/103340.yaml @@ -0,0 +1,5 @@ +pr: 103340 +summary: Avoid humongous blocks +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103342.yaml b/docs/changelog/103342.yaml new file mode 100644 index 0000000000000..32711d7a6b390 --- /dev/null +++ b/docs/changelog/103342.yaml @@ -0,0 +1,5 @@ +pr: 103342 +summary: Use dataset size instead of on-disk size for data stream stats +area: Data streams +type: bug +issues: [] diff --git a/docs/changelog/103361.yaml b/docs/changelog/103361.yaml new file mode 100644 index 0000000000000..441acc09895ef --- /dev/null +++ b/docs/changelog/103361.yaml @@ -0,0 +1,5 @@ +pr: 103361 +summary: Prevent attempts to access non-existent node information during rebalancing +area: Machine Learning +type: bug +issues: [ ] diff --git a/docs/changelog/103387.yaml b/docs/changelog/103387.yaml new file mode 100644 index 0000000000000..77239fb9a3778 --- /dev/null +++ b/docs/changelog/103387.yaml @@ -0,0 +1,5 @@ +pr: 103387 +summary: Upgrade to Lucene 9.9.1 +area: Search +type: upgrade +issues: [] diff --git a/docs/changelog/103398.yaml b/docs/changelog/103398.yaml new file mode 100644 index 0000000000000..69452616ddc99 --- /dev/null +++ b/docs/changelog/103398.yaml @@ -0,0 +1,5 @@ +pr: 103398 +summary: ES|QL Async Query API +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103408.yaml b/docs/changelog/103408.yaml new file mode 100644 index 0000000000000..bf5081b854f08 --- /dev/null +++ b/docs/changelog/103408.yaml @@ -0,0 +1,6 @@ +pr: 103408 +summary: Cache component versions +area: Infra/Core +type: bug +issues: + - 102103 diff --git a/docs/changelog/103427.yaml b/docs/changelog/103427.yaml new file mode 100644 index 0000000000000..57a27aa687ab7 --- /dev/null +++ b/docs/changelog/103427.yaml @@ -0,0 +1,5 @@ +pr: 103427 +summary: "[Connector API] Fix bug with nullable tooltip field in parser" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/103430.yaml b/docs/changelog/103430.yaml new file mode 100644 index 0000000000000..cd2444270849d --- /dev/null +++ b/docs/changelog/103430.yaml @@ -0,0 +1,5 @@ +pr: 103430 +summary: "[Connectors API] Fix bug with missing TEXT `DisplayType` enum" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/103434.yaml b/docs/changelog/103434.yaml new file mode 100644 index 0000000000000..56af604fe08f7 --- /dev/null +++ b/docs/changelog/103434.yaml @@ -0,0 +1,11 @@ +pr: 103434 +summary: Lower the 
`look_ahead_time` index setting's max value from 7 days to 2 hours. +area: TSDB +type: breaking +issues: [] +breaking: + title: Lower the `look_ahead_time` index setting's max value + area: Index setting + details: "Lower the `look_ahead_time` index setting's max value from 7 days to 2 hours." + impact: "Any value between 2 hours and 7 days will be treated as a look-ahead time of 2 hours." + notable: false diff --git a/docs/changelog/103435.yaml b/docs/changelog/103435.yaml new file mode 100644 index 0000000000000..95e3c7169ada9 --- /dev/null +++ b/docs/changelog/103435.yaml @@ -0,0 +1,5 @@ +pr: 103435 +summary: Dispatch `ClusterStateAction#buildResponse` to executor +area: Distributed +type: bug +issues: [] diff --git a/docs/changelog/103461.yaml b/docs/changelog/103461.yaml new file mode 100644 index 0000000000000..3a1bf30aa90c9 --- /dev/null +++ b/docs/changelog/103461.yaml @@ -0,0 +1,5 @@ +pr: 103461 +summary: Add support for Well Known Binary (WKB) in the fields API for spatial fields +area: Geo +type: enhancement +issues: [] diff --git a/docs/changelog/103474.yaml b/docs/changelog/103474.yaml new file mode 100644 index 0000000000000..a1da15a6bfbe5 --- /dev/null +++ b/docs/changelog/103474.yaml @@ -0,0 +1,6 @@ +pr: 103474 +summary: Fix now in millis for ESQL search contexts +area: ES|QL +type: bug +issues: + - 103455 diff --git a/docs/changelog/103508.yaml b/docs/changelog/103508.yaml new file mode 100644 index 0000000000000..9c6f79ef75657 --- /dev/null +++ b/docs/changelog/103508.yaml @@ -0,0 +1,5 @@ +pr: 103508 +summary: "[Connectors API] Fix `ClassCastException` when creating a new sync job" +area: Application +type: bug +issues: [] diff --git a/docs/changelog/103520.yaml b/docs/changelog/103520.yaml new file mode 100644 index 0000000000000..0ef7124eb1ed2 --- /dev/null +++ b/docs/changelog/103520.yaml @@ -0,0 +1,5 @@ +pr: 103520 +summary: Request indexing memory pressure in APM node metrics publisher +area: Distributed +type: bug +issues: [] diff --git a/docs/changelog/103530.yaml b/docs/changelog/103530.yaml new file mode 100644 index 0000000000000..6feb04467b03e --- /dev/null +++ b/docs/changelog/103530.yaml @@ -0,0 +1,5 @@ +pr: 103530 +summary: Exclude quantiles when fetching model snapshots where possible +area: Machine Learning +type: bug +issues: [] diff --git a/docs/changelog/103538.yaml b/docs/changelog/103538.yaml new file mode 100644 index 0000000000000..5aaed771d5ee4 --- /dev/null +++ b/docs/changelog/103538.yaml @@ -0,0 +1,6 @@ +pr: 103538 +summary: "ESQL: Improve pushdown of certain filters" +area: ES|QL +type: bug +issues: + - 103536 diff --git a/docs/changelog/103555.yaml b/docs/changelog/103555.yaml new file mode 100644 index 0000000000000..2b0dc2692e252 --- /dev/null +++ b/docs/changelog/103555.yaml @@ -0,0 +1,6 @@ +pr: 103555 +summary: "[Security Solution] Allow write permission for `kibana_system` role on endpoint\ + \ response index" +area: Authorization +type: enhancement +issues: [] diff --git a/docs/changelog/103574.yaml b/docs/changelog/103574.yaml new file mode 100644 index 0000000000000..ed6ad237f49a2 --- /dev/null +++ b/docs/changelog/103574.yaml @@ -0,0 +1,5 @@ +pr: 103574 +summary: Samples should check if the aggregations result is empty or null +area: EQL +type: bug +issues: [] diff --git a/docs/changelog/103580.yaml b/docs/changelog/103580.yaml new file mode 100644 index 0000000000000..6fd0328017d1f --- /dev/null +++ b/docs/changelog/103580.yaml @@ -0,0 +1,6 @@ +pr: 103580 +summary: Copy counter field properties to downsampled index +area:
Downsampling +type: bug +issues: + - 103569 diff --git a/docs/changelog/103591.yaml b/docs/changelog/103591.yaml new file mode 100644 index 0000000000000..41b6e362c5713 --- /dev/null +++ b/docs/changelog/103591.yaml @@ -0,0 +1,6 @@ +pr: 103591 +summary: Wait for the model results on graceful shutdown +area: Machine Learning +type: bug +issues: + - 103414 diff --git a/docs/changelog/103592.yaml b/docs/changelog/103592.yaml new file mode 100644 index 0000000000000..21e06f1f5a10d --- /dev/null +++ b/docs/changelog/103592.yaml @@ -0,0 +1,5 @@ +pr: 103592 +summary: Remove deprecated Block APIs +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103601.yaml b/docs/changelog/103601.yaml new file mode 100644 index 0000000000000..bf7aaaf835e00 --- /dev/null +++ b/docs/changelog/103601.yaml @@ -0,0 +1,7 @@ +pr: 103601 +summary: Introduce Elasticsearch `PostingFormat` based on Lucene 90 posting format + using PFOR +area: Search +type: bug +issues: + - 103002 diff --git a/docs/changelog/103611.yaml b/docs/changelog/103611.yaml new file mode 100644 index 0000000000000..51c77cd286d66 --- /dev/null +++ b/docs/changelog/103611.yaml @@ -0,0 +1,6 @@ +pr: 103611 +summary: Fix NPE on missing event queries +area: EQL +type: bug +issues: + - 103608 diff --git a/docs/changelog/103615.yaml b/docs/changelog/103615.yaml new file mode 100644 index 0000000000000..69498c749687f --- /dev/null +++ b/docs/changelog/103615.yaml @@ -0,0 +1,5 @@ +pr: 103615 +summary: Fix downsample API by returning a failure in case one or more downsample persistent tasks failed +area: Downsampling +type: bug +issues: [] diff --git a/docs/changelog/103628.yaml b/docs/changelog/103628.yaml new file mode 100644 index 0000000000000..42259c7bcde46 --- /dev/null +++ b/docs/changelog/103628.yaml @@ -0,0 +1,5 @@ +pr: 103628 +summary: Add ES|QL async delete API +area: ES|QL +type: enhancement +issues: [] diff --git a/docs/changelog/103633.yaml b/docs/changelog/103633.yaml new file mode 100644 index 0000000000000..9e36451caafd8 --- /dev/null +++ b/docs/changelog/103633.yaml @@ -0,0 +1,5 @@ +pr: 103633 +summary: Update S3 latency metric to use micros +area: Search +type: enhancement +issues: [] diff --git a/docs/changelog/103643.yaml b/docs/changelog/103643.yaml new file mode 100644 index 0000000000000..966fb57acf566 --- /dev/null +++ b/docs/changelog/103643.yaml @@ -0,0 +1,5 @@ +pr: 103643 +summary: "[Profiling] Use shard request cache consistently" +area: Application +type: enhancement +issues: [] diff --git a/docs/changelog/103646.yaml b/docs/changelog/103646.yaml new file mode 100644 index 0000000000000..b7a6fae025771 --- /dev/null +++ b/docs/changelog/103646.yaml @@ -0,0 +1,5 @@ +pr: 103646 +summary: Add index mapping parameter for `counted_keyword` +area: Aggregations +type: enhancement +issues: [] diff --git a/docs/changelog/103669.yaml b/docs/changelog/103669.yaml new file mode 100644 index 0000000000000..57361b9d842e4 --- /dev/null +++ b/docs/changelog/103669.yaml @@ -0,0 +1,5 @@ +pr: 103669 +summary: Validate inference model IDs +area: Machine Learning +type: enhancement +issues: [] diff --git a/docs/changelog/103670.yaml b/docs/changelog/103670.yaml new file mode 100644 index 0000000000000..ad3f0519b5d19 --- /dev/null +++ b/docs/changelog/103670.yaml @@ -0,0 +1,5 @@ +pr: 103670 +summary: "ESQL: Improve local folding of aggregates" +area: ES|QL +type: bug +issues: [] diff --git a/docs/changelog/103710.yaml b/docs/changelog/103710.yaml new file mode 100644 index 0000000000000..539b9f553ccc2 --- /dev/null +++
b/docs/changelog/103710.yaml @@ -0,0 +1,5 @@ +pr: 103710 +summary: List hidden shard stores by default +area: Store +type: enhancement +issues: [] diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index ce3d0a367dc4e..3efb8f6de9b3e 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -45,6 +45,13 @@ Use `synonyms_set` configuration option to provide a synonym set created via Syn } ---- +[WARNING] +====== +Synonyms sets must exist before they can be added to indices. +If an index is created referencing a nonexistent synonyms set, the index will remain in a partially created and inoperable state. +The only way to recover from this scenario is to ensure the synonyms set exists, then either delete and re-create the index, or close and re-open the index. +====== + Use `synonyms_path` to provide a synonym file: [source,JSON] diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index ce055d38092ff..046cd297b5092 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -33,6 +33,13 @@ Use `synonyms_set` configuration option to provide a synonym set created via Syn } ---- +[WARNING] +====== +Synonyms sets must exist before they can be added to indices. +If an index is created referencing a nonexistent synonyms set, the index will remain in a partially created and inoperable state. +The only way to recover from this scenario is to ensure the synonyms set exists, then either delete and re-create the index, or close and re-open the index. +====== + Use `synonyms_path` to provide a synonym file: [source,JSON] diff --git a/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc new file mode 100644 index 0000000000000..6123b7eb5511d --- /dev/null +++ b/docs/reference/connector/apis/cancel-connector-sync-job-api.asciidoc @@ -0,0 +1,50 @@ +[[cancel-connector-sync-job-api]] +=== Cancel connector sync job API +++++ +Cancel connector sync job +++++ + +Cancels a connector sync job. + +[[cancel-connector-sync-job-api-request]] +==== {api-request-title} +`PUT _connector/_sync_job/<connector_sync_job_id>/_cancel` + +[[cancel-connector-sync-job-api-prereqs]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_sync_job_id` parameter should reference an existing connector sync job. + +[[cancel-connector-sync-job-api-desc]] +==== {api-description-title} + +Cancels a connector sync job, which sets the `status` to `cancelling` and updates `cancellation_requested_at` to the current time. +The connector service is then responsible for setting the `status` of connector sync jobs to `cancelled`. + +[[cancel-connector-sync-job-api-path-params]] +==== {api-path-parms-title} + +`connector_sync_job_id`:: +(Required, string) + +[[cancel-connector-sync-job-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector sync job cancellation was successfully requested. + +`404`:: +No connector sync job matching `connector_sync_job_id` could be found.
+ +[[cancel-connector-sync-job-api-example]] +==== {api-examples-title} + +The following example cancels the connector sync job with ID `my-connector-sync-job-id`: + +[source,console] +---- +PUT _connector/_sync_job/my-connector-sync-job-id/_cancel +---- +// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time. Therefore, skip this test.] + diff --git a/docs/reference/connector/apis/check-in-connector-api.asciidoc b/docs/reference/connector/apis/check-in-connector-api.asciidoc new file mode 100644 index 0000000000000..c0c021f1304dc --- /dev/null +++ b/docs/reference/connector/apis/check-in-connector-api.asciidoc @@ -0,0 +1,76 @@ +[[check-in-connector-api]] +=== Check in connector API + +preview::[] + +++++ +Check in a connector +++++ + +Updates the `last_seen` field of a connector with the current timestamp. + +[[check-in-connector-api-request]] +==== {api-request-title} + +`PUT _connector/<connector_id>/_check_in` + +[[check-in-connector-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. + +[[check-in-connector-api-path-params]] +==== {api-path-parms-title} + +`<connector_id>`:: +(Required, string) + + +[[check-in-connector-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `last_seen` field was successfully updated with the current timestamp. + +`400`:: +The `connector_id` was not provided. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[check-in-connector-api-example]] +==== {api-examples-title} + +The following example updates the `last_seen` property with the current timestamp for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_check_in +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc new file mode 100644 index 0000000000000..04c8057e2c115 --- /dev/null +++ b/docs/reference/connector/apis/check-in-connector-sync-job-api.asciidoc @@ -0,0 +1,48 @@ +[[check-in-connector-sync-job-api]] +=== Check in connector sync job API +++++ +Check in connector sync job +++++ + +Checks in a connector sync job (updates `last_seen` to the current time). + +[[check-in-connector-sync-job-api-request]] +==== {api-request-title} +`PUT _connector/_sync_job/<connector_sync_job_id>/_check_in` + +[[check-in-connector-sync-job-api-prereqs]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_sync_job_id` parameter should reference an existing connector sync job. + +[[check-in-connector-sync-job-api-desc]] +==== {api-description-title} + +Checks in a connector sync job and sets `last_seen` to the time right before updating it in the internal index.
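One way to observe this effect, sketched here under the assumption that a sync job with the hypothetical ID `my-connector-sync-job-id` already exists, is to check the job in and then fetch it to inspect the updated `last_seen` value:

[source,console]
----
PUT _connector/_sync_job/my-connector-sync-job-id/_check_in

GET _connector/_sync_job/my-connector-sync-job-id
----
// TEST[skip:sync job IDs are generated at creation time, so this sketch cannot run as-is]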
+ +[[check-in-connector-sync-job-path-params]] +==== {api-path-parms-title} + +`<connector_sync_job_id>`:: +(Required, string) + +[[check-in-connector-sync-job-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector sync job was successfully checked in. + +`404`:: +No connector sync job matching `connector_sync_job_id` could be found. + +[[check-in-connector-sync-job-api-example]] +==== {api-examples-title} + +The following example checks in the connector sync job `my-connector-sync-job`: + +[source,console] +---- +PUT _connector/_sync_job/my-connector-sync-job/_check_in +---- +// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time. Therefore, skip this test.] diff --git a/docs/reference/connector/apis/connector-apis.asciidoc b/docs/reference/connector/apis/connector-apis.asciidoc index a777d5919f71a..e127dc07446b5 100644 --- a/docs/reference/connector/apis/connector-apis.asciidoc +++ b/docs/reference/connector/apis/connector-apis.asciidoc @@ -23,6 +23,17 @@ You can use these APIs to create, get, delete and update connectors. Use the following APIs to manage connectors: * <<create-connector-api>> +* <<check-in-connector-api>> +* <<delete-connector-api>> +* <<get-connector-api>> +* <<list-connectors-api>> +* <<update-connector-configuration-api>> +* <<update-connector-error-api>> +* <<update-connector-filtering-api>> +* <<update-connector-last-sync-api>> +* <<update-connector-name-description-api>> +* <<update-connector-pipeline-api>> +* <<update-connector-scheduling-api>> [discrete] @@ -34,4 +45,33 @@ You can use these APIs to create, cancel, delete and update sync jobs. Use the following APIs to manage sync jobs: +* <<cancel-connector-sync-job-api>> +* <<check-in-connector-sync-job-api>> +* <<create-connector-sync-job-api>> +* <<delete-connector-sync-job-api>> +* <<get-connector-sync-job-api>> +* <<list-connector-sync-jobs-api>> +* <<set-connector-sync-job-error-api>> +* <<set-connector-sync-job-stats-api>> + + +include::cancel-connector-sync-job-api.asciidoc[] +include::check-in-connector-api.asciidoc[] +include::check-in-connector-sync-job-api.asciidoc[] include::create-connector-api.asciidoc[] +include::create-connector-sync-job-api.asciidoc[] +include::delete-connector-api.asciidoc[] +include::delete-connector-sync-job-api.asciidoc[] +include::get-connector-api.asciidoc[] +include::get-connector-sync-job-api.asciidoc[] +include::list-connectors-api.asciidoc[] +include::list-connector-sync-jobs-api.asciidoc[] +include::set-connector-sync-job-error-api.asciidoc[] +include::set-connector-sync-job-stats-api.asciidoc[] +include::update-connector-configuration-api.asciidoc[] +include::update-connector-error-api.asciidoc[] +include::update-connector-filtering-api.asciidoc[] +include::update-connector-last-sync-api.asciidoc[] +include::update-connector-name-description-api.asciidoc[] +include::update-connector-pipeline-api.asciidoc[] +include::update-connector-scheduling-api.asciidoc[] diff --git a/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc new file mode 100644 index 0000000000000..e8c2c364797c4 --- /dev/null +++ b/docs/reference/connector/apis/create-connector-sync-job-api.asciidoc @@ -0,0 +1,69 @@ +[[create-connector-sync-job-api]] +=== Create connector sync job API +++++ +Create connector sync job +++++ + +Creates a connector sync job. + +[source, console] +-------------------------------------------------- +POST _connector/_sync_job +{ + "id": "connector-id", + "job_type": "full", + "trigger_method": "on_demand" +} +-------------------------------------------------- +// TEST[skip:there's no way to clean up after this code snippet, as we don't know the id ahead of time] + + +[[create-connector-sync-job-api-request]] +==== {api-request-title} +`POST _connector/_sync_job` + + +[[create-connector-sync-job-api-prereqs]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `id` parameter should reference an existing connector (see the sketch below).
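Because the `id` must reference an existing connector, a typical flow registers the connector first and then creates a sync job for it. The following is a sketch only; the connector ID `my-connector` and its settings are illustrative:

[source,console]
----
PUT _connector/my-connector
{
  "index_name": "search-google-drive",
  "name": "My Connector",
  "service_type": "google_drive"
}

POST _connector/_sync_job
{
  "id": "my-connector",
  "job_type": "full",
  "trigger_method": "on_demand"
}
----
// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time]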
+ +[[create-connector-sync-job-api-desc]] +==== {api-description-title} + +Creates a connector sync job document in the internal index and initializes its counters and timestamps with default values. +Certain values can be updated via the API. + +[role="child_attributes"] +[[create-connector-sync-job-api-request-body]] +==== {api-request-body-title} + +`id`:: +(Required, string) The ID of the connector to create the sync job for. + +`job_type`:: +(Optional, string) The job type of the created sync job. Defaults to `full`. + +`trigger_method`:: +(Optional, string) The trigger method of the created sync job. Defaults to `on_demand`. + + +[role="child_attributes"] +[[create-connector-sync-job-api-response-body]] +==== {api-response-body-title} + +`id`:: +(string) The ID associated with the connector sync job document. + +[[create-connector-sync-job-api-response-codes]] +==== {api-response-codes-title} + +`201`:: +Indicates that the connector sync job was created successfully. + +`400`:: +Indicates that the request was malformed. + +`404`:: +Indicates that either the index or the referenced connector is missing. diff --git a/docs/reference/connector/apis/delete-connector-api.asciidoc b/docs/reference/connector/apis/delete-connector-api.asciidoc new file mode 100644 index 0000000000000..6d3a120df785a --- /dev/null +++ b/docs/reference/connector/apis/delete-connector-api.asciidoc @@ -0,0 +1,67 @@ +[[delete-connector-api]] +=== Delete connector API + +preview::[] + +++++ +Delete connector +++++ + +Removes a connector and its associated data. +This is a destructive action that is not recoverable. + +[[delete-connector-api-request]] +==== {api-request-title} + +`DELETE _connector/<connector_id>` + +[[delete-connector-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. + +[[delete-connector-api-path-params]] +==== {api-path-parms-title} + +`<connector_id>`:: +(Required, string) + +[[delete-connector-api-response-codes]] +==== {api-response-codes-title} + +`400`:: +The `connector_id` was not provided. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[delete-connector-api-example]] +==== {api-examples-title} + +The following example deletes the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP +//// + +[source,console] +---- +DELETE _connector/my-connector +---- + +[source,console-result] +---- +{ + "acknowledged": true +} +---- diff --git a/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc new file mode 100644 index 0000000000000..8641794576bf1 --- /dev/null +++ b/docs/reference/connector/apis/delete-connector-sync-job-api.asciidoc @@ -0,0 +1,54 @@ +[[delete-connector-sync-job-api]] +=== Delete connector sync job API + +preview::[] + +++++ +Delete connector sync job +++++ + +Removes a connector sync job and its associated data. +This is a destructive action that is not recoverable.
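Since a deleted sync job cannot be restored, it can be worth double-checking the job ID first, for example by listing the sync jobs of the owning connector. A sketch, assuming the hypothetical connector ID `connector-1`:

[source,console]
----
GET _connector/_sync_job?connector_id=connector-1
----
// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time]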
+ +[[delete-connector-sync-job-api-request]] +==== {api-request-title} + +`DELETE _connector/_sync_job/` + +[[delete-connector-sync-job-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. + +[[delete-connector-sync-job-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[[delete-connector-sync-job-api-response-codes]] +==== {api-response-codes-title} + +`400`:: +The `connector_sync_job_id` was not provided. + +`404`:: +No connector sync job matching `connector_sync_job_id` could be found. + +[[delete-connector-sync-job-api-example]] +==== {api-examples-title} + +The following example deletes the connector sync job with ID `my-connector-sync-job-id`: + +[source,console] +---- +DELETE _connector/_sync_job/my-connector-sync-job-id +---- +// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time] + +[source,console-result] +---- +{ + "acknowledged": true +} +---- diff --git a/docs/reference/connector/apis/get-connector-api.asciidoc b/docs/reference/connector/apis/get-connector-api.asciidoc new file mode 100644 index 0000000000000..ab4a2758ce4f1 --- /dev/null +++ b/docs/reference/connector/apis/get-connector-api.asciidoc @@ -0,0 +1,63 @@ +[[get-connector-api]] +=== Get connector API +preview::[] +++++ +Get connector +++++ + +Retrieves the details about a connector. + +[[get-connector-api-request]] +==== {api-request-title} + +`GET _connector/` + +[[get-connector-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. + +[[get-connector-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[[get-connector-api-response-codes]] +==== {api-response-codes-title} + +`400`:: +The `connector_id` was not provided. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[get-connector-api-example]] +==== {api-examples-title} + +The following example gets the connector `my-connector`: + +//// +[source,console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "Google Drive Connector", + "service_type": "google_drive" +} + +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +GET _connector/my-connector +---- diff --git a/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc new file mode 100644 index 0000000000000..b33aec8c55e60 --- /dev/null +++ b/docs/reference/connector/apis/get-connector-sync-job-api.asciidoc @@ -0,0 +1,44 @@ +[[get-connector-sync-job-api]] +=== Get connector sync job API +preview::[] +++++ +Get connector sync job +++++ + +Retrieves the details about a connector sync job. + +[[get-connector-sync-job-api-request]] +==== {api-request-title} + +`GET _connector/_sync_job/` + +[[get-connector-sync-job-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. 
+
+[[get-connector-sync-job-api-path-params]]
+==== {api-path-parms-title}
+
+`<connector_sync_job_id>`::
+(Required, string)
+
+[[get-connector-sync-job-api-response-codes]]
+==== {api-response-codes-title}
+
+`400`::
+The `connector_sync_job_id` was not provided.
+
+`404` (Missing resources)::
+No connector sync job matching `connector_sync_job_id` could be found.
+
+[[get-connector-sync-job-api-example]]
+==== {api-examples-title}
+
+The following example gets the connector sync job `my-connector-sync-job`:
+
+[source,console]
+----
+GET _connector/_sync_job/my-connector-sync-job
+----
+// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time. Therefore, skip this test.]
diff --git a/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc
new file mode 100644
index 0000000000000..8b88f318f5304
--- /dev/null
+++ b/docs/reference/connector/apis/list-connector-sync-jobs-api.asciidoc
@@ -0,0 +1,80 @@
+[role="xpack"]
+[[list-connector-sync-jobs-api]]
+=== List connector sync jobs API
+
+preview::[]
+
+++++
+List connector sync jobs
+++++
+
+Returns information about all stored connector sync jobs, ordered by their creation date in ascending order.
+
+
+[[list-connector-sync-jobs-api-request]]
+==== {api-request-title}
+
+`GET _connector/_sync_job`
+
+[[list-connector-sync-jobs-api-prereq]]
+==== {api-prereq-title}
+
+* To sync data using connectors, it's essential to have the Elastic connectors service running.
+
+[[list-connector-sync-jobs-api-path-params]]
+==== {api-path-parms-title}
+
+`size`::
+(Optional, integer) Maximum number of results to retrieve. Defaults to `100`.
+
+`from`::
+(Optional, integer) The offset from the first result to fetch. Defaults to `0`.
+
+`status`::
+(Optional, job status) Only fetch sync jobs with the specified status.
+
+`connector_id`::
+(Optional, string) Only fetch sync jobs for the connector with the specified ID.
+
+[[list-connector-sync-jobs-api-example]]
+==== {api-examples-title}
+
+The following example lists all connector sync jobs:
+
+
+[source,console]
+----
+GET _connector/_sync_job
+----
+// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time]
+
+The following example lists the first two connector sync jobs:
+
+[source,console]
+----
+GET _connector/_sync_job?from=0&size=2
+----
+// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time]
+
+The following example lists pending connector sync jobs (the first 100 by default):
+
+[source,console]
+----
+GET _connector/_sync_job?status=pending
+----
+// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time]
+
+The following example lists connector sync jobs (the first 100 by default) for the connector with ID `connector-1`:
+
+[source,console]
+----
+GET _connector/_sync_job?connector_id=connector-1
+----
+// TEST[skip:there's no way to clean up after this code snippet, as we don't know the ids of sync jobs ahead of time]
+
+[[list-connector-sync-jobs-api-response-codes]]
+==== {api-response-codes-title}
+
+`200`::
+Indicates that results were successfully returned (results can also be empty).
+
+`400`::
+Indicates that the request was malformed.
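+
+For illustration, a successful response with no stored sync jobs resembles the sketch below. This assumes the endpoint mirrors the `count`/`results` shape of the <<list-connector-api,list connectors API>>; the actual documents depend on the stored sync jobs:
+
+[source,console-result]
+----
+{
+  "count": 0,
+  "results": []
+}
+----
+// TESTRESPONSE[skip:illustrative sketch only]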
diff --git a/docs/reference/connector/apis/list-connectors-api.asciidoc b/docs/reference/connector/apis/list-connectors-api.asciidoc new file mode 100644 index 0000000000000..57d3cc47aeb7a --- /dev/null +++ b/docs/reference/connector/apis/list-connectors-api.asciidoc @@ -0,0 +1,77 @@ +[role="xpack"] +[[list-connector-api]] +=== List connectors API + +preview::[] + +++++ +List connectors +++++ + +Returns information about all stored connectors. + + +[[list-connector-api-request]] +==== {api-request-title} + +`GET _connector` + +[[list-connector-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. + +[[list-connector-api-path-params]] +==== {api-path-parms-title} + +`size`:: +(Optional, integer) Maximum number of results to retrieve. + +`from`:: +(Optional, integer) The offset from the first result to fetch. + +[[list-connector-api-example]] +==== {api-examples-title} + +The following example lists all connectors: + +//// +[source,console] +-------------------------------------------------- +PUT _connector/connector-1 +{ + "index_name": "search-google-drive", + "name": "Google Drive Connector", + "service_type": "google_drive" +} + +PUT _connector/connector-2 +{ + "index_name": "search-sharepoint-online", + "name": "Sharepoint Online Connector", + "service_type": "sharepoint_online" +} + +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/connector-1 + +DELETE _connector/connector-2 +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +GET _connector +---- + +The following example lists the first two connectors: + +[source,console] +---- +GET _connector/?from=0&size=2 +---- diff --git a/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc new file mode 100644 index 0000000000000..935fcccc77fcf --- /dev/null +++ b/docs/reference/connector/apis/set-connector-sync-job-error-api.asciidoc @@ -0,0 +1,58 @@ +[[set-connector-sync-job-error-api]] +=== Set connector sync job error API +++++ +Set connector sync job error +++++ + +Sets a connector sync job error. + +[[set-connector-sync-job-error-api-request]] +==== {api-request-title} +`PUT _connector/_sync_job//_error` + +[[set-connector-sync-job-error-api-prereqs]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_sync_job_id` parameter should reference an existing connector sync job. + +[[set-connector-sync-job-error-api-desc]] +==== {api-description-title} + +Sets the `error` field for the specified connector sync job and sets its `status` to `error`. + +[[set-connector-sync-job-error-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[set-connector-sync-job-error-api-request-body]] +==== {api-request-body-title} + +`error`:: +(Required, string) The error to set the connector sync job `error` field to. + +[[set-connector-sync-job-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Indicates that the connector sync job error was set successfully. + +`404`:: +No connector sync job matching `connector_sync_job_id` could be found. 
+ +[[set-connector-sync-job-error-api-example]] +==== {api-examples-title} + +The following example sets the error `some-error` in the connector sync job `my-connector-sync-job`: + +[source,console] +---- +PUT _connector/_sync_job/my-connector-sync-job/_error +{ + "error": "some-error" +} +---- +// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time. Therefore, skip this test.] diff --git a/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc new file mode 100644 index 0000000000000..0513155312bb4 --- /dev/null +++ b/docs/reference/connector/apis/set-connector-sync-job-stats-api.asciidoc @@ -0,0 +1,77 @@ +[[set-connector-sync-job-stats-api]] +=== Set connector sync job stats API +++++ +Set connector sync job stats +++++ + +Sets connector sync job stats. + +[[set-connector-sync-job-stats-api-request]] +==== {api-request-title} +`PUT _connector/_sync_job//_stats` + +[[set-connector-sync-job-stats-api-prereqs]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_sync_job_id` parameter should reference an existing connector sync job. + +[[set-connector-sync-job-stats-api-desc]] +==== {api-description-title} + +Sets the stats for a connector sync job. +Stats include: `deleted_document_count`, `indexed_document_count`, `indexed_document_volume` and `total_document_count`. +`last_seen` can also be updated using this API. +This API is mainly used by the connector service for updating sync job information. + +[[set-connector-sync-job-stats-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[set-connector-sync-job-stats-api-request-body]] +==== {api-request-body-title} + +`deleted_document_count`:: +(Required, int) The number of documents the sync job deleted. + +`indexed_document_count`:: +(Required, int) The number of documents the sync job indexed. + +`indexed_document_volume`:: +(Required, int) The total size of the data (in MiB) the sync job indexed. + +`total_document_count`:: +(Optional, int) The total number of documents in the target index after the sync job finished. + +`last_seen`:: +(Optional, instant) The timestamp to set the connector sync job's `last_seen` property. + +[[set-connector-sync-job-stats-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Indicates that the connector sync job stats were successfully updated. + +`404`:: +No connector sync job matching `connector_sync_job_id` could be found. + +[[set-connector-sync-job-stats-api-example]] +==== {api-examples-title} + +The following example sets all mandatory and optional stats for the connector sync job `my-connector-sync-job`: + +[source,console] +---- +PUT _connector/_sync_job/my-connector-sync-job/_stats +{ + "deleted_document_count": 10, + "indexed_document_count": 20, + "indexed_document_volume": 1000, + "total_document_count": 2000, + "last_seen": "2023-01-02T10:00:00Z" +} +---- +// TEST[skip:there's no way to clean up after creating a connector sync job, as we don't know the id ahead of time. Therefore, skip this test.] 
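+
+On success, the endpoint acknowledges the update. A sketch of the expected acknowledgement, assuming this endpoint responds like the other connector update endpoints shown in these docs:
+
+[source,console-result]
+----
+{
+  "result": "updated"
+}
+----
+// TESTRESPONSE[skip:illustrative sketch only]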
diff --git a/docs/reference/connector/apis/update-connector-configuration-api.asciidoc b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc new file mode 100644 index 0000000000000..6d6591a6f00bc --- /dev/null +++ b/docs/reference/connector/apis/update-connector-configuration-api.asciidoc @@ -0,0 +1,154 @@ +[[update-connector-configuration-api]] +=== Update connector configuration API + +preview::[] + +++++ +Update connector configuration +++++ + +Updates the `configuration` of a connector. + + +[[update-connector-configuration-api-request]] +==== {api-request-title} + +`PUT _connector//_configuration` + +[[update-connector-configuration-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. +* The configuration fields definition must be compatible with the specific connector type being used. + +[[update-connector-configuration-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-configuration-api-request-body]] +==== {api-request-body-title} + +`configuration`:: +(Required, object) The configuration for the connector. The configuration field is a map where each key represents a specific configuration field name, and the value is a `ConnectorConfiguration` object. + +Each `ConnectorConfiguration` object contains the following attributes: + +* `category` (Optional, string) The category of the configuration field. This helps in grouping related configurations together in the user interface. + +* `default_value` (Required, string | number | bool) The default value for the configuration. This value is used if the value field is empty, applicable only for non-required fields. + +* `depends_on` (Required, array of `ConfigurationDependency`) An array of dependencies on other configurations. A field will not be enabled unless these dependencies are met. Each dependency specifies a field key and the required value for the dependency to be considered fulfilled. + +* `display` (Required, string) The display type for the UI element that represents this configuration. This defines how the field should be rendered in the user interface. Supported types are: `text`, `textbox`, `textarea`, `numeric`, `toggle` and `dropdown`. + +* `label` (Required, string) The display label for the configuration field. This label is shown in the user interface, adjacent to the field. + +* `options` (Required, array of `ConfigurationSelectOption`) An array of options for list-type fields. These options are used for inputs in the user interface, each having a label for display and a value. + +* `order` (Required, number) The order in which this configuration appears in the user interface. This helps in organizing fields logically. + +* `placeholder` (Required, string) Placeholder text for the configuration field. This text is displayed inside the field before a value is entered. + +* `required` (Required, boolean) Indicates whether the configuration is mandatory. If true, a value must be provided for the field. + +* `sensitive` (Required, boolean) Indicates whether the configuration contains sensitive information. Sensitive fields may be obfuscated in the user interface. + +* `tooltip` (Optional, string) Tooltip text providing additional information about the configuration. This text appears when the user hovers over the info icon next to the configuration field. 
+
+* `type` (Required, string) The type of the configuration field, such as `str`, `int`, `bool`, `list`. This defines the data type and format of the field's value.
+
+* `ui_restrictions` (Required, array of strings) A list of UI restrictions. These restrictions define where in the user interface this field should be available or restricted.
+
+* `validations` (Required, array of `ConfigurationValidation`) An array of rules for validating the field's value. Each validation specifies a type and a constraint that the field's value must meet.
+
+* `value` (Required, string | number | bool) The current value of the configuration. This is the actual value set for the field and is used by the connector during its operations.
+
+`ConfigurationDependency` represents a dependency that a configuration field has on another field's value. It contains the following attributes:
+
+* `field` (Required, string) The name of the field in the configuration that this dependency relates to.
+
+* `value` (Required, string | number | bool) The required value of the specified field for this dependency to be met.
+
+`ConfigurationSelectOption` defines an option within a selectable configuration field. It contains the following attributes:
+
+* `label` (Required, string) The display label for the option.
+
+* `value` (Required, string) The actual value associated with the option.
+
+`ConfigurationValidation` specifies validation rules for configuration fields. Each `ConfigurationValidation` instance enforces a specific type of validation based on its type and constraint. It contains the following attributes:
+
+* `constraint` (Required, string | number) The validation constraint. The nature of this constraint depends on the validation type. It could be a numeric value, a list, or a regular expression pattern.
+
+* `type` (Required, `ConfigurationValidationType`) The type of validation to be performed. Possible values include: `less_than`, `greater_than`, `list_type`, `included_in`, `regex` and `unset`.
+
+
+[[update-connector-configuration-api-response-codes]]
+==== {api-response-codes-title}
+
+`200`::
+Connector configuration was successfully updated.
+
+`400`::
+The `connector_id` was not provided or the request payload was malformed.
+
+`404` (Missing resources)::
+No connector matching `connector_id` could be found.
+
+[[update-connector-configuration-api-example]]
+==== {api-examples-title}
+
+The following example updates the `configuration` for the connector with ID `my-connector`:
+
+////
+[source, console]
+--------------------------------------------------
+PUT _connector/my-connector
+{
+  "index_name": "search-google-drive",
+  "name": "My Connector",
+  "service_type": "google_drive"
+}
+--------------------------------------------------
+// TESTSETUP
+
+[source,console]
+--------------------------------------------------
+DELETE _connector/my-connector
+--------------------------------------------------
+// TEARDOWN
+////
+
+[source,console]
+----
+PUT _connector/my-connector/_configuration
+{
+  "configuration": {
+    "service_account_credentials": {
+      "default_value": null,
+      "depends_on": [],
+      "display": "textarea",
+      "label": "Google Drive service account JSON",
+      "options": [],
+      "order": 1,
+      "required": true,
+      "sensitive": true,
+      "tooltip": "This connector authenticates as a service account to synchronize content from Google Drive.",
+      "type": "str",
+      "ui_restrictions": [],
+      "validations": [],
+      "value": "...service account JSON..."
+    }
+  }
+}
+----
+
+[source,console-result]
+----
+{
+  "result": "updated"
+}
+----
diff --git a/docs/reference/connector/apis/update-connector-error-api.asciidoc b/docs/reference/connector/apis/update-connector-error-api.asciidoc
new file mode 100644
index 0000000000000..19bc15f0dc60a
--- /dev/null
+++ b/docs/reference/connector/apis/update-connector-error-api.asciidoc
@@ -0,0 +1,86 @@
+[[update-connector-error-api]]
+=== Update connector error API
+
+preview::[]
+
+++++
+Update connector error
+++++
+
+Updates the `error` field of a connector.
+
+[[update-connector-error-api-request]]
+==== {api-request-title}
+
+`PUT _connector/<connector_id>/_error`
+
+[[update-connector-error-api-prereq]]
+==== {api-prereq-title}
+
+* To sync data using connectors, it's essential to have the Elastic connectors service running.
+* The `connector_id` parameter should reference an existing connector.
+
+[[update-connector-error-api-path-params]]
+==== {api-path-parms-title}
+
+`<connector_id>`::
+(Required, string)
+
+[role="child_attributes"]
+[[update-connector-error-api-request-body]]
+==== {api-request-body-title}
+
+`error`::
+(Required, string) A message related to the last error encountered by the connector.
+ +[[update-connector-filtering-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-filtering-api-request-body]] +==== {api-request-body-title} + +`filtering`:: +(Required, array) The filtering configuration for the connector. This configuration determines the set of rules applied for filtering data during syncs. + +Each entry in the `filtering` array represents a set of filtering rules for a specific data domain and includes the following attributes: + +- `domain` (Required, string) + +Specifies the data domain to which these filtering rules apply. + +- `active` (Required, object) + +Contains the set of rules that are actively used for sync jobs. The `active` object includes: + + * `rules` (Required, array of objects) + + An array of individual filtering rule objects, each with the following sub-attributes: + ** `id` (Required, string) + + A unique identifier for the rule. + ** `policy` (Required, string) + + Specifies the policy, such as "include" or "exclude". + ** `field` (Required, string) + + The field in the document to which this rule applies. + ** `rule` (Required, string) + + The type of rule, such as "regex", "starts_with", "ends_with", "contains", "equals", "<", ">", etc. + ** `value` (Required, string) + + The value to be used in conjunction with the rule for matching the contents of the document's field. + ** `order` (Required, number) + + The order in which the rules are applied. The first rule to match has its policy applied. + ** `created_at` (Optional, datetime) + + The timestamp when the rule was added. + ** `updated_at` (Optional, datetime) + + The timestamp when the rule was last edited. + + * `advanced_snippet` (Optional, object) + + Used for {enterprise-search-ref}/sync-rules.html#sync-rules-advanced[advanced filtering] at query time, with the following sub-attributes: + ** `value` (Required, object) + + A JSON object passed directly to the connector for advanced filtering. + ** `created_at` (Optional, datetime) + + The timestamp when this JSON object was created. + ** `updated_at` (Optional, datetime) + + The timestamp when this JSON object was last edited. + + * `validation` (Optional, object) + + Provides validation status for the rules, including: + ** `state` (Required, string) + + Indicates the validation state: "edited", "valid", or "invalid". + ** `errors` (Optional, object) + + Contains details about any validation errors, with sub-attributes: + *** `ids` (Required, string) + + The ID(s) of any rules deemed invalid. + *** `messages` (Required, string) + + Messages explaining what is invalid about the rules. + +- `draft` (Optional, object) + +An object identical in structure to the `active` object, but used for drafting and editing filtering rules before they become active. + + +[[update-connector-filtering-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `filtering` field was successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. 
+ +[[update-connector-filtering-api-example]] +==== {api-examples-title} + +The following example updates the `filtering` property for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_filtering +{ + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-11-09T15:13:08.231Z", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-11-09T15:13:08.231Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-11-09T15:13:08.231Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ] +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc new file mode 100644 index 0000000000000..e9fffd22b21cd --- /dev/null +++ b/docs/reference/connector/apis/update-connector-last-sync-api.asciidoc @@ -0,0 +1,135 @@ +[[update-connector-last-sync-api]] +=== Update connector last sync stats API + +preview::[] + +++++ +Update connector last sync stats +++++ + +Updates the fields related to the last sync of a connector. + +This action is used for analytics and monitoring. + +[[update-connector-last-sync-api-request]] +==== {api-request-title} + +`PUT _connector//_last_sync` + +[[update-connector-last-sync-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. + +[[update-connector-last-sync-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-last-sync-api-request-body]] +==== {api-request-body-title} + +`last_access_control_sync_error`:: +(Optional, string) The last error message related to access control sync, if any. + +`last_access_control_sync_scheduled_at`:: +(Optional, datetime) The datetime indicating when the last access control sync was scheduled. + +`last_access_control_sync_status`:: +(Optional, ConnectorSyncStatus) The status of the last access control sync. + +`last_deleted_document_count`:: +(Optional, long) The number of documents deleted in the last sync process. + +`last_incremental_sync_scheduled_at`:: +(Optional, datetime) The datetime when the last incremental sync was scheduled. + +`last_indexed_document_count`:: +(Optional, long) The number of documents indexed in the last sync. 
+ +`last_sync_error`:: +(Optional, string) The last error message encountered during a sync process, if any. + +`last_sync_scheduled_at`:: +(Optional, datetime) The datetime when the last sync was scheduled. + +`last_sync_status`:: +(Optional, ConnectorSyncStatus) The status of the last sync. + +`last_synced`:: +(Optional, datetime) The datetime of the last successful synchronization. + + +The value of `ConnectorSyncStatus` is one of the following lowercase strings representing different sync states: + +* `canceling`: The sync process is in the process of being canceled. +* `canceled`: The sync process has been canceled. +* `completed`: The sync process completed successfully. +* `error`: An error occurred during the sync process. +* `in_progress`: The sync process is currently underway. +* `pending`: The sync is pending and has not yet started. +* `suspended`: The sync process has been temporarily suspended. + + +[[update-connector-last-sync-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector last sync stats were successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[update-connector-last-sync-api-example]] +==== {api-examples-title} + +The following example updates the last sync stats for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_last_sync +{ + "last_access_control_sync_error": "Houston, we have a problem!", + "last_access_control_sync_scheduled_at": "2023-11-09T15:13:08.231Z", + "last_access_control_sync_status": "pending", + "last_deleted_document_count": 42, + "last_incremental_sync_scheduled_at": "2023-11-09T15:13:08.231Z", + "last_indexed_document_count": 42, + "last_sync_error": "Houston, we have a problem!", + "last_sync_scheduled_at": "2024-11-09T15:13:08.231Z", + "last_sync_status": "completed", + "last_synced": "2024-11-09T15:13:08.231Z" +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/connector/apis/update-connector-name-description-api.asciidoc b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc new file mode 100644 index 0000000000000..d45fb545e168b --- /dev/null +++ b/docs/reference/connector/apis/update-connector-name-description-api.asciidoc @@ -0,0 +1,90 @@ +[[update-connector-name-description-api]] +=== Update connector name and description API + +preview::[] + +++++ +Update connector name and description +++++ + +Updates the `name` and `description` fields of a connector. + +[[update-connector-name-description-api-request]] +==== {api-request-title} + +`PUT _connector//_name` + +[[update-connector-name-description-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. 
+ +[[update-connector-name-description-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-name-description-api-request-body]] +==== {api-request-body-title} + +`name`:: +(Required, string) Name of the connector. + +`description`:: +(Optional, string) Description of the connector. + + +[[update-connector-name-description-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `name` and `description` fields were successfully updated. + +`400`:: +The `connector_id` was not provided or the request payload was malformed. + +`404` (Missing resources):: +No connector matching `connector_id` could be found. + +[[update-connector-name-description-api-example]] +==== {api-examples-title} + +The following example updates the `name` and `description` fields for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_name +{ + "name": "Custom connector", + "description": "This is my customized connector" +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc new file mode 100644 index 0000000000000..6938506703da8 --- /dev/null +++ b/docs/reference/connector/apis/update-connector-pipeline-api.asciidoc @@ -0,0 +1,103 @@ +[[update-connector-pipeline-api]] +=== Update connector pipeline API + +preview::[] + +++++ +Update connector pipeline +++++ + +Updates the `pipeline` configuration of a connector. + +When you create a new connector, the configuration of an <> is populated with default settings. + +[[update-connector-pipeline-api-request]] +==== {api-request-title} + +`PUT _connector//_pipeline` + +[[update-connector-pipeline-api-prereq]] +==== {api-prereq-title} + +* To sync data using connectors, it's essential to have the Elastic connectors service running. +* The `connector_id` parameter should reference an existing connector. + +[[update-connector-pipeline-api-path-params]] +==== {api-path-parms-title} + +``:: +(Required, string) + +[role="child_attributes"] +[[update-connector-pipeline-api-request-body]] +==== {api-request-body-title} + +`pipeline`:: +(Required, object) The pipeline configuration of the connector. The pipeline determines how data is processed during ingestion into Elasticsearch. + +Pipeline configuration must include the following attributes: + +- `extract_binary_content` (Required, boolean) A flag indicating whether to extract binary content during ingestion. + +- `name` (Required, string) The name of the ingest pipeline. + +- `reduce_whitespace` (Required, boolean) A flag indicating whether to reduce extra whitespace in the ingested content. + +- `run_ml_inference` (Required, boolean) A flag indicating whether to run machine learning inference on the ingested content. + + +[[update-connector-pipeline-api-response-codes]] +==== {api-response-codes-title} + +`200`:: +Connector `pipeline` field was successfully updated. 
+
+`400`::
+The `connector_id` was not provided or the request payload was malformed.
+
+`404` (Missing resources)::
+No connector matching `connector_id` could be found.
+
+[[update-connector-pipeline-api-example]]
+==== {api-examples-title}
+
+The following example updates the `pipeline` property for the connector with ID `my-connector`:
+
+////
+[source, console]
+--------------------------------------------------
+PUT _connector/my-connector
+{
+  "index_name": "search-google-drive",
+  "name": "My Connector",
+  "service_type": "google_drive"
+}
+--------------------------------------------------
+// TESTSETUP
+
+[source,console]
+--------------------------------------------------
+DELETE _connector/my-connector
+--------------------------------------------------
+// TEARDOWN
+////
+
+[source,console]
+----
+PUT _connector/my-connector/_pipeline
+{
+  "pipeline": {
+    "extract_binary_content": true,
+    "name": "my-connector-pipeline",
+    "reduce_whitespace": true,
+    "run_ml_inference": true
+  }
+}
+----
+
+[source,console-result]
+----
+{
+  "result": "updated"
+}
+----
diff --git a/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc
new file mode 100644
index 0000000000000..c47e6d4c0367b
--- /dev/null
+++ b/docs/reference/connector/apis/update-connector-scheduling-api.asciidoc
@@ -0,0 +1,113 @@
+[[update-connector-scheduling-api]]
+=== Update connector scheduling API
+
+preview::[]
+
+++++
+Update connector scheduling
+++++
+
+Updates the `scheduling` configuration of a connector.
+
+[[update-connector-scheduling-api-request]]
+==== {api-request-title}
+
+`PUT _connector/<connector_id>/_scheduling`
+
+[[update-connector-scheduling-api-prereq]]
+==== {api-prereq-title}
+
+* To sync data using connectors, it's essential to have the Elastic connectors service running.
+* The `connector_id` parameter should reference an existing connector.
+
+[[update-connector-scheduling-api-path-params]]
+==== {api-path-parms-title}
+
+`<connector_id>`::
+(Required, string)
+
+[role="child_attributes"]
+[[update-connector-scheduling-api-request-body]]
+==== {api-request-body-title}
+
+`scheduling`::
+(Required, object) The scheduling configuration for the connector. This configuration determines the frequency of synchronization operations for the connector.
+
+The scheduling configuration includes the following attributes, each represented as a `ScheduleConfig` object:
+
+- `access_control` (Required, `ScheduleConfig` object) Defines the schedule for synchronizing access control settings of the connector.
+
+- `full` (Required, `ScheduleConfig` object) Defines the schedule for full content syncs.
+
+- `incremental` (Required, `ScheduleConfig` object) Defines the schedule for incremental content syncs.
+
+Each `ScheduleConfig` object includes the following sub-attributes:
+
+  - `enabled` (Required, boolean) A flag that enables or disables the scheduling.
+
+  - `interval` (Required, string) A CRON expression representing the sync schedule. This expression defines the frequency at which the sync operations should occur. It must be provided in a valid CRON format.
+
+
+[[update-connector-scheduling-api-response-codes]]
+==== {api-response-codes-title}
+
+`200`::
+Connector `scheduling` field was successfully updated.
+
+`400`::
+The `connector_id` was not provided or the request payload was malformed.
+
+`404` (Missing resources)::
+No connector matching `connector_id` could be found.
+ +[[update-connector-scheduling-api-example]] +==== {api-examples-title} + +The following example updates the `scheduling` property for the connector with ID `my-connector`: + +//// +[source, console] +-------------------------------------------------- +PUT _connector/my-connector +{ + "index_name": "search-google-drive", + "name": "My Connector", + "service_type": "google_drive" +} +-------------------------------------------------- +// TESTSETUP + +[source,console] +-------------------------------------------------- +DELETE _connector/my-connector +-------------------------------------------------- +// TEARDOWN +//// + +[source,console] +---- +PUT _connector/my-connector/_scheduling +{ + "scheduling": { + "access_control": { + "enabled": true, + "interval": "0 10 0 * * ?" + }, + "full": { + "enabled": true, + "interval": "0 20 0 * * ?" + }, + "incremental": { + "enabled": false, + "interval": "0 30 0 * * ?" + } + } +} +---- + +[source,console-result] +---- +{ + "result": "updated" +} +---- diff --git a/docs/reference/data-streams/set-up-tsds.asciidoc b/docs/reference/data-streams/set-up-tsds.asciidoc index c175da2e991e9..ed6b79653e61f 100644 --- a/docs/reference/data-streams/set-up-tsds.asciidoc +++ b/docs/reference/data-streams/set-up-tsds.asciidoc @@ -176,9 +176,7 @@ PUT _component_template/my-weather-sensor-mappings Optionally, the index settings component template for a TSDS can include: * Your lifecycle policy in the `index.lifecycle.name` index setting. -* The <> index setting. -* The <> index setting. -* Other index settings, such as <>, for your TSDS's +* Other index settings, such as <>, for your TSDS's backing indices. IMPORTANT: Don't specify the `index.routing_path` index setting in a component @@ -191,8 +189,7 @@ PUT _component_template/my-weather-sensor-settings { "template": { "settings": { - "index.lifecycle.name": "my-lifecycle-policy", - "index.look_ahead_time": "3h" + "index.lifecycle.name": "my-lifecycle-policy" } }, "_meta": { diff --git a/docs/reference/data-streams/tsds-index-settings.asciidoc b/docs/reference/data-streams/tsds-index-settings.asciidoc index c0cae9e365114..98976231661ec 100644 --- a/docs/reference/data-streams/tsds-index-settings.asciidoc +++ b/docs/reference/data-streams/tsds-index-settings.asciidoc @@ -28,13 +28,13 @@ value (exclusive) accepted by the index. Only indices with an `index.mode` of `index.look_ahead_time`:: (<<_static_index_settings,Static>>, <>) Interval used to calculate the `index.time_series.end_time` for a TSDS's write -index. Defaults to `2h` (2 hours). Accepts `1m` (one minute) to `7d` (seven -days). Only indices with an `index.mode` of `time_series` support this setting. +index. Defaults to `2h` (2 hours). Accepts `1m` (one minute) to `2h` (two +hours). Only indices with an `index.mode` of `time_series` support this setting. For more information, refer to <>. Additionally this setting can not be less than `time_series.poll_interval` cluster setting. NOTE: Increasing the `look_ahead_time` will also increase the amount of time {ilm-cap} -waits before being able to proceed with executing the actions that expect the +waits before being able to proceed with executing the actions that expect the index to not receive any writes anymore. For more information, refer to <>. 
[[index-look-back-time]]
diff --git a/docs/reference/esql/esql-async-query-api.asciidoc b/docs/reference/esql/esql-async-query-api.asciidoc
new file mode 100644
index 0000000000000..335edaae33388
--- /dev/null
+++ b/docs/reference/esql/esql-async-query-api.asciidoc
@@ -0,0 +1,92 @@
+[[esql-async-query-api]]
+== {esql} async query API
+++++
+{esql} async query API
+++++
+
+Runs an async {esql} search.
+
+The async query API lets you asynchronously execute a search request,
+monitor its progress, and retrieve results as they become available.
+
+Executing an <> is commonly quite fast;
+however, searches across large data sets or frozen data can take some time.
+To avoid long waits, run an async {esql} search.
+
+A search initiated by this API may or may not return results with the
+initial response. The `wait_for_completion_timeout` property determines
+how long to wait for the search results. The default value is 1 second.
+If the results are not available by this time, a search ID is returned,
+which can later be used to retrieve the results.
+
+Initiates an async search for an <>
+query. The API accepts the same parameters and request body as the
+<>.
+
+[source,console]
+----
+POST /_query/async
+{
+  "query": """
+    FROM library
+    | EVAL year = DATE_TRUNC(1 YEARS, release_date)
+    | STATS MAX(page_count) BY year
+    | SORT year
+    | LIMIT 5
+  """,
+  "wait_for_completion_timeout": "2s"
+}
+----
+// TEST[setup:library]
+
+If the results are not available within the timeout period, 2 seconds in
+this case, the search doesn't return results. Instead, it returns a
+response that includes:
+
+ * A search ID
+ * An `is_running` value of `true`, indicating the search is ongoing
+
+The query continues to run in the background without blocking other
+requests.
+
+[source,console-result]
+----
+{
+  "id": "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
+  "is_running": true
+}
+----
+// TEST[skip: no access to search ID - may return response values]
+
+To check the progress of an async search, use the <> with the search ID. Specify how long you'd like to wait for
+complete results in the `wait_for_completion_timeout` parameter.
+
+[source,console]
+----
+GET /_query/async/get/FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=?wait_for_completion_timeout=30s
+----
+// TEST[skip: no access to search ID - may return response values]
+
+If the response's `is_running` value is `false`, the async search has
+finished, and the results are returned.
+
+[source,console-result]
+----
+{
+  "id": "FmNJRUZ1YWZCU3dHY1BIOUhaenVSRkEaaXFlZ3h4c1RTWFNocDdnY2FSaERnUTozNDE=",
+  "is_running": false,
+  "columns": ...
+}
+----
+// TEST[skip: no access to search ID - may return response values]
+
+Use the <> to
+delete an async search before the `keep_alive` period ends. If the query
+is still running, {es} cancels it.
+
+[source,console]
+----
+DELETE /_query/async/delete/FmdMX2pIang3UWhLRU5QS0lqdlppYncaMUpYQ05oSkpTc3kwZ21EdC1tbFJXQToxOTI=
+----
+// TEST[skip: no access to search ID]
diff --git a/docs/reference/esql/functions/abs.asciidoc b/docs/reference/esql/functions/abs.asciidoc
index 3adb7dff07043..32b49bc287a83 100644
--- a/docs/reference/esql/functions/abs.asciidoc
+++ b/docs/reference/esql/functions/abs.asciidoc
@@ -1,18 +1,41 @@
 [discrete]
 [[esql-abs]]
 === `ABS`
+
+*Syntax*
+
 [.text-center]
 image::esql/functions/signature/abs.svg[Embedded,opts=inline]
 
+*Parameters*
+
+`n`::
+Numeric expression. If `null`, the function returns `null`.
+
+*Description*
+
 Returns the absolute value.
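+
+For instance, a minimal sketch with hypothetical values (not taken from the generated examples below):
+
+[source,esql]
+----
+ROW n = -5
+| EVAL abs_n = ABS(n) // returns 5
+----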
-[source,esql] +*Supported types* + +include::types/abs.asciidoc[] + +*Examples* + +[source.merge.styled,esql] ---- -FROM employees -| KEEP first_name, last_name, height -| EVAL abs_height = ABS(0.0 - height) +include::{esql-specs}/math.csv-spec[tag=docsAbs] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=docsAbs-result] +|=== -Supported types: - -include::types/abs.asciidoc[] +[source.merge.styled,esql] +---- +include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/math.csv-spec[tag=docsAbsEmployees-result] +|=== \ No newline at end of file diff --git a/docs/reference/esql/functions/asin.asciidoc b/docs/reference/esql/functions/asin.asciidoc index f03b5276b7dd6..222f6879785ef 100644 --- a/docs/reference/esql/functions/asin.asciidoc +++ b/docs/reference/esql/functions/asin.asciidoc @@ -1,10 +1,28 @@ [discrete] [[esql-asin]] === `ASIN` + +*Syntax* + [.text-center] image::esql/functions/signature/asin.svg[Embedded,opts=inline] -Inverse https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[sine] trigonometric function. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Returns the +https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arcsine] +of the input numeric expression as an angle, expressed in radians. + +*Supported types* + +include::types/asin.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -14,7 +32,3 @@ include::{esql-specs}/floats.csv-spec[tag=asin] |=== include::{esql-specs}/floats.csv-spec[tag=asin-result] |=== - -Supported types: - -include::types/asin.asciidoc[] diff --git a/docs/reference/esql/functions/atan.asciidoc b/docs/reference/esql/functions/atan.asciidoc index 3813e096aeba1..bdbbd07cbba60 100644 --- a/docs/reference/esql/functions/atan.asciidoc +++ b/docs/reference/esql/functions/atan.asciidoc @@ -1,10 +1,28 @@ [discrete] [[esql-atan]] === `ATAN` + +*Syntax* + [.text-center] image::esql/functions/signature/atan.svg[Embedded,opts=inline] -Inverse https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[tangent] trigonometric function. +*Parameters* + +`n`:: +Numeric expression. If `null`, the function returns `null`. + +*Description* + +Returns the +https://en.wikipedia.org/wiki/Inverse_trigonometric_functions[arctangent] of the +input numeric expression as an angle, expressed in radians. + +*Supported types* + +include::types/atan.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -13,8 +31,4 @@ include::{esql-specs}/floats.csv-spec[tag=atan] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/floats.csv-spec[tag=atan-result] -|=== - -Supported types: - -include::types/atan.asciidoc[] +|=== \ No newline at end of file diff --git a/docs/reference/esql/functions/atan2.asciidoc b/docs/reference/esql/functions/atan2.asciidoc index e78a219333344..3ecc0ff86fe26 100644 --- a/docs/reference/esql/functions/atan2.asciidoc +++ b/docs/reference/esql/functions/atan2.asciidoc @@ -1,11 +1,31 @@ [discrete] [[esql-atan2]] === `ATAN2` + +*Syntax* + [.text-center] image::esql/functions/signature/atan2.svg[Embedded,opts=inline] -The https://en.wikipedia.org/wiki/Atan2[angle] between the positive x-axis and the -ray from the origin to the point (x , y) in the Cartesian plane. +*Parameters* + +`y`:: +Numeric expression. If `null`, the function returns `null`. + +`x`:: +Numeric expression. If `null`, the function returns `null`. 
+ +*Description* + +The https://en.wikipedia.org/wiki/Atan2[angle] between the positive x-axis and +the ray from the origin to the point (x , y) in the Cartesian plane, expressed +in radians. + +*Supported types* + +include::types/atan2.asciidoc[] + +*Example* [source.merge.styled,esql] ---- @@ -15,7 +35,3 @@ include::{esql-specs}/floats.csv-spec[tag=atan2] |=== include::{esql-specs}/floats.csv-spec[tag=atan2-result] |=== - -Supported types: - -include::types/atan2.asciidoc[] diff --git a/docs/reference/esql/functions/auto_bucket.asciidoc b/docs/reference/esql/functions/auto_bucket.asciidoc index 47e453f382229..2301939cf5050 100644 --- a/docs/reference/esql/functions/auto_bucket.asciidoc +++ b/docs/reference/esql/functions/auto_bucket.asciidoc @@ -1,72 +1,118 @@ [discrete] [[esql-auto_bucket]] === `AUTO_BUCKET` -Creates human-friendly buckets and returns a `datetime` value for each row that -corresponds to the resulting bucket the row falls into. Combine `AUTO_BUCKET` -with <> to create a date histogram. -You provide a target number of buckets, a start date, and an end date, and it -picks an appropriate bucket size to generate the target number of buckets or -fewer. For example, this asks for at most 20 buckets over a whole year, which -picks monthly buckets: +*Syntax* + +[source,esql] +---- +AUTO_BUCKET(field, buckets, from, to) +---- + +*Parameters* + +`field`:: +Numeric or date column from which to derive buckets. + +`buckets`:: +Target number of buckets. + +`from`:: +Start of the range. Can be a number or a date expressed as a string. + +`to`:: +End of the range. Can be a number or a date expressed as a string. + +*Description* + +Creates human-friendly buckets and returns a value for each row that corresponds +to the resulting bucket the row falls into. + +Using a target number of buckets, a start of a range, and an end of a range, +`AUTO_BUCKET` picks an appropriate bucket size to generate the target number of +buckets or fewer. For example, asking for at most 20 buckets over a year results +in monthly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_month] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=auto_bucket_month-result] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonth-result] |=== The goal isn't to provide *exactly* the target number of buckets, it's to pick a -range that people are comfortable with that provides at most the target number of -buckets. +range that people are comfortable with that provides at most the target number +of buckets. -If you ask for more buckets then `AUTO_BUCKET` can pick a smaller range. For example, -asking for at most 100 buckets in a year will get you week long buckets: +Combine `AUTO_BUCKET` with +<> to create a histogram: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_week] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=auto_bucket_week-result] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketMonthlyHistogram-result] |=== -`AUTO_BUCKET` does not filter any rows. It only uses the provided time range to -pick a good bucket size. For rows with a date outside of the range, it returns a -`datetime` that corresponds to a bucket outside the range. Combine `AUTO_BUCKET` -with <> to filter rows. 
+NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents. +That's why this example is missing `1985-03-01` and other dates. -A more complete example might look like: +Asking for more buckets can result in a smaller range. For example, asking for +at most 100 buckets in a year results in weekly buckets: [source.merge.styled,esql] ---- -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result] +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketWeeklyHistogram-result] |=== -NOTE: `AUTO_BUCKET` does not create buckets that don't match any documents. That's -why the example above is missing `1985-03-01` and other dates. +NOTE: `AUTO_BUCKET` does not filter any rows. It only uses the provided range to +pick a good bucket size. For rows with a value outside of the range, it returns +a bucket value that corresponds to a bucket outside the range. Combine +`AUTO_BUCKET` with <> to filter rows. -==== Numeric fields +`AUTO_BUCKET` can also operate on numeric fields. For example, to create a +salary histogram: -`auto_bucket` can also operate on numeric fields like this: [source.merge.styled,esql] ---- -include::{esql-specs}/ints.csv-spec[tag=auto_bucket] +include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/ints.csv-spec[tag=auto_bucket-result] +include::{esql-specs}/ints.csv-spec[tag=docsAutoBucketNumeric-result] |=== -Unlike the example above where you are intentionally filtering on a date range, -you rarely want to filter on a numeric range. So you have find the `min` and `max` -separately. We don't yet have an easy way to do that automatically. Improvements -coming! +Unlike the earlier example that intentionally filters on a date range, you +rarely want to filter on a numeric range. You have to find the `min` and `max` +separately. {esql} doesn't yet have an easy way to do that automatically. + +*Examples* + +Create hourly buckets for the last 24 hours, and calculate the number of events +per hour: + + +[source.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsAutoBucketLast24hr] +---- + +Create monthly buckets for the year 1985, and calculate the average salary by +hiring month: + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=auto_bucket_in_agg-result] +|=== diff --git a/docs/reference/esql/functions/avg.asciidoc b/docs/reference/esql/functions/avg.asciidoc index 972d30545ceb4..6345be99c5d6d 100644 --- a/docs/reference/esql/functions/avg.asciidoc +++ b/docs/reference/esql/functions/avg.asciidoc @@ -1,8 +1,27 @@ [discrete] [[esql-agg-avg]] === `AVG` + +*Syntax* + +[source,esql] +---- +AVG(column) +---- + +`column`:: +Numeric column. If `null`, the function returns `null`. + +*Description* + The average of a numeric field. +*Supported types* + +The result is always a `double` no matter the input type. + +*Example* + [source.merge.styled,esql] ---- include::{esql-specs}/stats.csv-spec[tag=avg] @@ -11,5 +30,3 @@ include::{esql-specs}/stats.csv-spec[tag=avg] |=== include::{esql-specs}/stats.csv-spec[tag=avg-result] |=== - -The result is always a `double` not matter the input type. 
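+
+As an additional minimal sketch, using the `employees` test data set assumed by other examples in these docs:
+
+[source,esql]
+----
+FROM employees
+| STATS avg_height = AVG(height)
+----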
diff --git a/docs/reference/esql/functions/case.asciidoc b/docs/reference/esql/functions/case.asciidoc
index 84ff083147cb9..b5fda636135b2 100644
--- a/docs/reference/esql/functions/case.asciidoc
+++ b/docs/reference/esql/functions/case.asciidoc
@@ -32,6 +32,8 @@ no condition matches, the function returns `null`.

 *Example*

+Determine whether employees are monolingual, bilingual, or polyglot:
+
 [source.merge.styled,esql]
 ----
@@ -41,3 +43,28 @@ include::{esql-specs}/docs.csv-spec[tag=case]
 |===
 include::{esql-specs}/docs.csv-spec[tag=case-result]
 |===
+
+Calculate the total connection success rate based on log messages:
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/conditional.csv-spec[tag=docsCaseSuccessRate-result]
+|===
+
+Calculate an hourly error rate as a percentage of the total number of log
+messages:
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result]
+|===
diff --git a/docs/reference/esql/functions/ceil.asciidoc b/docs/reference/esql/functions/ceil.asciidoc
index f977e544e6c3f..bc132e6bf47e6 100644
--- a/docs/reference/esql/functions/ceil.asciidoc
+++ b/docs/reference/esql/functions/ceil.asciidoc
@@ -1,11 +1,32 @@
 [discrete]
 [[esql-ceil]]
 === `CEIL`
+
+*Syntax*
+
 [.text-center]
 image::esql/functions/signature/ceil.svg[Embedded,opts=inline]

+*Parameters*
+
+`n`::
+Numeric expression. If `null`, the function returns `null`.
+
+*Description*
+
 Round a number up to the nearest integer.

+NOTE: This is a noop for `long` (including unsigned) and `integer`.
+ For `double` this picks the closest `double` value to the integer
+ similar to {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil].
+
+*Supported types*
+
+include::types/ceil.asciidoc[]
+
+
+*Example*
+
 [source.merge.styled,esql]
 ----
 include::{esql-specs}/math.csv-spec[tag=ceil]
@@ -14,11 +35,3 @@ include::{esql-specs}/math.csv-spec[tag=ceil]
 |===
 include::{esql-specs}/math.csv-spec[tag=ceil-result]
 |===
-
-NOTE: This is a noop for `long` (including unsigned) and `integer`.
- For `double` this picks the the closest `double` value to the integer ala
- {javadoc}/java.base/java/lang/Math.html#ceil(double)[Math.ceil].
-
-Supported types:
-
-include::types/ceil.asciidoc[]
diff --git a/docs/reference/esql/functions/cidr_match.asciidoc b/docs/reference/esql/functions/cidr_match.asciidoc
index 5072a6eef7fd5..1c7fbb57a0044 100644
--- a/docs/reference/esql/functions/cidr_match.asciidoc
+++ b/docs/reference/esql/functions/cidr_match.asciidoc
@@ -2,15 +2,33 @@
 [[esql-cidr_match]]
 === `CIDR_MATCH`

+*Syntax*
+
+[source,esql]
+----
+CIDR_MATCH(ip, block1[, ..., blockN])
+----
+
+*Parameters*
+
+`ip`::
+IP address of type `ip` (both IPv4 and IPv6 are supported).
+
+`blockX`::
+CIDR block to test the IP against.
+
+*Description*
+
 Returns `true` if the provided IP is contained in one of the provided CIDR
 blocks.

-`CIDR_MATCH` accepts two or more arguments. The first argument is the IP
-address of type `ip` (both IPv4 and IPv6 are supported). Subsequent arguments
-are the CIDR blocks to test the IP against.
+*Example*

-[source,esql]
+[source.merge.styled,esql]
 ----
-FROM hosts
-| WHERE CIDR_MATCH(ip, "127.0.0.2/32", "127.0.0.3/32")
+include::{esql-specs}/ip.csv-spec[tag=cdirMatchMultipleArgs]
 ----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/ip.csv-spec[tag=cdirMatchMultipleArgs-result]
+|===
diff --git a/docs/reference/esql/functions/coalesce.asciidoc b/docs/reference/esql/functions/coalesce.asciidoc
index 550780eaa070d..1121a75209151 100644
--- a/docs/reference/esql/functions/coalesce.asciidoc
+++ b/docs/reference/esql/functions/coalesce.asciidoc
@@ -2,7 +2,24 @@
 [[esql-coalesce]]
 === `COALESCE`

-Returns the first non-null value.
+*Syntax*
+
+[source,esql]
+----
+COALESCE(expression1 [, ..., expressionN])
+----
+
+*Parameters*
+
+`expressionX`::
+Expression to evaluate.
+
+*Description*
+
+Returns the first of its arguments that is not null. If all arguments are null,
+it returns `null`.
+
+*Example*

 [source.merge.styled,esql]
 ----
diff --git a/docs/reference/esql/functions/concat.asciidoc b/docs/reference/esql/functions/concat.asciidoc
index 4864f5623a170..0b30211a72be2 100644
--- a/docs/reference/esql/functions/concat.asciidoc
+++ b/docs/reference/esql/functions/concat.asciidoc
@@ -1,11 +1,30 @@
 [discrete]
 [[esql-concat]]
 === `CONCAT`
-Concatenates two or more strings.
+
+*Syntax*

 [source,esql]
 ----
-FROM employees
-| KEEP first_name, last_name, height
-| EVAL fullname = CONCAT(first_name, " ", last_name)
+CONCAT(string1, string2[, ..., stringN])
+----
+
+*Parameters*
+
+`stringX`::
+Strings to concatenate.
+
+*Description*
+
+Concatenates two or more strings.
+
+*Example*
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/eval.csv-spec[tag=docsConcat]
 ----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/eval.csv-spec[tag=docsConcat-result]
+|===
diff --git a/docs/reference/esql/functions/cos.asciidoc b/docs/reference/esql/functions/cos.asciidoc
index 7227f57e28120..f7874d46c558a 100644
--- a/docs/reference/esql/functions/cos.asciidoc
+++ b/docs/reference/esql/functions/cos.asciidoc
@@ -1,10 +1,27 @@
 [discrete]
 [[esql-cos]]
 === `COS`
+
+*Syntax*
+
 [.text-center]
 image::esql/functions/signature/cos.svg[Embedded,opts=inline]

-https://en.wikipedia.org/wiki/Sine_and_cosine[Cosine] trigonometric function. Input expected in radians.
+*Parameters*
+
+`n`::
+Numeric expression. If `null`, the function returns `null`.
+
+*Description*
+
+Returns the https://en.wikipedia.org/wiki/Sine_and_cosine[cosine] of `n`. Input
+expected in radians.
+
+*Supported types*
+
+include::types/cos.asciidoc[]
+
+*Example*

 [source.merge.styled,esql]
 ----
@@ -14,7 +31,3 @@ include::{esql-specs}/floats.csv-spec[tag=cos]
 |===
 include::{esql-specs}/floats.csv-spec[tag=cos-result]
 |===
-
-Supported types:
-
-include::types/cos.asciidoc[]
diff --git a/docs/reference/esql/functions/cosh.asciidoc b/docs/reference/esql/functions/cosh.asciidoc
index 7bf0840958655..ae813e91ec9bb 100644
--- a/docs/reference/esql/functions/cosh.asciidoc
+++ b/docs/reference/esql/functions/cosh.asciidoc
@@ -1,10 +1,27 @@
 [discrete]
 [[esql-cosh]]
 === `COSH`
+
+*Syntax*
+
 [.text-center]
 image::esql/functions/signature/cosh.svg[Embedded,opts=inline]

-https://en.wikipedia.org/wiki/Hyperbolic_functions[Cosine] hyperbolic function.
+*Parameters*
+
+`n`::
+Numeric expression. If `null`, the function returns `null`.
+
+*Description*
+
+Returns the https://en.wikipedia.org/wiki/Hyperbolic_functions[hyperbolic
+cosine].
+
+*Supported types*
+
+include::types/cosh.asciidoc[]
+
+*Example*

 [source.merge.styled,esql]
 ----
@@ -14,7 +31,3 @@ include::{esql-specs}/floats.csv-spec[tag=cosh]
 |===
 include::{esql-specs}/floats.csv-spec[tag=cosh-result]
 |===
-
-Supported types:
-
-include::types/cosh.asciidoc[]
diff --git a/docs/reference/esql/functions/count-distinct.asciidoc b/docs/reference/esql/functions/count-distinct.asciidoc
index b5b1659140f63..14fa6eff39d4c 100644
--- a/docs/reference/esql/functions/count-distinct.asciidoc
+++ b/docs/reference/esql/functions/count-distinct.asciidoc
@@ -1,21 +1,28 @@
 [discrete]
 [[esql-agg-count-distinct]]
 === `COUNT_DISTINCT`
-The approximate number of distinct values.

-[source.merge.styled,esql]
+*Syntax*
+
+[source,esql]
 ----
-include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct]
+COUNT_DISTINCT(column[, precision])
 ----
-[%header.monospaced.styled,format=dsv,separator=|]
-|===
-include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-result]
-|===

-Can take any field type as input and the result is always a `long` not matter
-the input type.
+*Parameters*
+
+`column`::
+Column for which to count the number of distinct values.
+
+`precision`::
+Precision. Refer to <>.
+
+*Description*
+
+Returns the approximate number of distinct values.

 [discrete]
+[[esql-agg-count-distinct-approximate]]
 ==== Counts are approximate

 Computing exact counts requires loading values into a set and returning its
@@ -30,11 +37,25 @@ properties:

 include::../../aggregations/metrics/cardinality-aggregation.asciidoc[tag=explanation]

-[discrete]
-==== Precision is configurable
-
 The `COUNT_DISTINCT` function takes an optional second parameter to configure the
-precision discussed previously.
+precision.
+
+*Supported types*
+
+Can take any field type as input.
+
+*Examples*
+
+[source.merge.styled,esql]
+----
+include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct]
+----
+[%header.monospaced.styled,format=dsv,separator=|]
+|===
+include::{esql-specs}/stats_count_distinct.csv-spec[tag=count-distinct-result]
+|===
+
+With the optional second parameter to configure the precision:

 [source.merge.styled,esql]
 ----
diff --git a/docs/reference/esql/functions/count.asciidoc b/docs/reference/esql/functions/count.asciidoc
index a148df07edb4d..70b13d7fc16b3 100644
--- a/docs/reference/esql/functions/count.asciidoc
+++ b/docs/reference/esql/functions/count.asciidoc
@@ -1,7 +1,29 @@
 [discrete]
 [[esql-agg-count]]
 === `COUNT`
-Counts field values.
+
+*Syntax*
+
+[source,esql]
+----
+COUNT([input])
+----
+
+*Parameters*
+
+`input`::
+Column or literal for which to count the number of values. If omitted, returns a
+total count (the number of rows).
+
+*Description*
+
+Returns the total number (count) of input values.
+
+*Supported types*
+
+Can take any field type as input.
+
+*Examples*

 [source.merge.styled,esql]
 ----
@@ -12,10 +34,7 @@ include::{esql-specs}/stats.csv-spec[tag=count]
 include::{esql-specs}/stats.csv-spec[tag=count-result]
 |===

-Can take any field type as input and the result is always a `long` not matter
-the input type.
- -To count the number of rows, use `COUNT(*)`: +To count the number of rows, use `COUNT()` or `COUNT(*)`: [source.merge.styled,esql] ---- @@ -24,4 +43,4 @@ include::{esql-specs}/docs.csv-spec[tag=countAll] [%header.monospaced.styled,format=dsv,separator=|] |=== include::{esql-specs}/docs.csv-spec[tag=countAll-result] -|=== \ No newline at end of file +|=== diff --git a/docs/reference/esql/functions/date_extract.asciidoc b/docs/reference/esql/functions/date_extract.asciidoc index 89ef1cf261094..ce949483494a5 100644 --- a/docs/reference/esql/functions/date_extract.asciidoc +++ b/docs/reference/esql/functions/date_extract.asciidoc @@ -1,15 +1,56 @@ [discrete] [[esql-date_extract]] === `DATE_EXTRACT` -Extracts parts of a date, like year, month, day, hour. -The supported field types are those provided by https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField]. + +*Syntax* + +[source,esql] +---- +DATE_EXTRACT(date_part, date) +---- + +*Parameters* + +`date_part`:: +Part of the date to extract. Can be: `aligned_day_of_week_in_month`, +`aligned_day_of_week_in_year`, `aligned_week_of_month`, `aligned_week_of_year`, +`ampm_of_day`, `clock_hour_of_ampm`, `clock_hour_of_day`, `day_of_month`, +`day_of_week`, `day_of_year`, `epoch_day`, `era`, `hour_of_ampm`, `hour_of_day`, +`instant_seconds`, `micro_of_day`, `micro_of_second`, `milli_of_day`, +`milli_of_second`, `minute_of_day`, `minute_of_hour`, `month_of_year`, +`nano_of_day`, `nano_of_second`, `offset_seconds`, `proleptic_month`, +`second_of_day`, `second_of_minute`, `year`, or `year_of_era`. Refer to +https://docs.oracle.com/javase/8/docs/api/java/time/temporal/ChronoField.html[java.time.temporal.ChronoField] +for a description of these values. ++ +If `null`, the function returns `null`. + +`date`:: +Date expression. If `null`, the function returns `null`. + +*Description* + +Extracts parts of a date, like year, month, day, hour. + +*Examples* [source.merge.styled,esql] ---- -include::{esql-specs}/docs.csv-spec[tag=dateExtract] +include::{esql-specs}/date.csv-spec[tag=dateExtract] ---- [%header.monospaced.styled,format=dsv,separator=|] |=== -include::{esql-specs}/docs.csv-spec[tag=dateExtract-result] +include::{esql-specs}/date.csv-spec[tag=dateExtract-result] |=== +Find all events that occurred outside of business hours (before 9 AM or after 5 +PM), on any given date: + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateExtractBusinessHours] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateExtractBusinessHours-result] +|=== diff --git a/docs/reference/esql/functions/date_format.asciidoc b/docs/reference/esql/functions/date_format.asciidoc index 5a87f31412cc8..4a0d36d133a4c 100644 --- a/docs/reference/esql/functions/date_format.asciidoc +++ b/docs/reference/esql/functions/date_format.asciidoc @@ -1,12 +1,35 @@ [discrete] [[esql-date_format]] === `DATE_FORMAT` -Returns a string representation of a date in the provided format. If no format -is specified, the `yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. + +*Syntax* [source,esql] ---- -FROM employees -| KEEP first_name, last_name, hire_date -| EVAL hired = DATE_FORMAT("YYYY-MM-dd", hire_date) +DATE_FORMAT([format,] date) +---- + +*Parameters* + +`format`:: +Date format (optional). If no format is specified, the +`yyyy-MM-dd'T'HH:mm:ss.SSSZ` format is used. If `null`, the function returns +`null`. + +`date`:: +Date expression. 
If `null`, the function returns `null`. + +*Description* + +Returns a string representation of a date, in the provided format. + +*Example* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateFormat] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateFormat-result] +|=== diff --git a/docs/reference/esql/functions/date_trunc.asciidoc b/docs/reference/esql/functions/date_trunc.asciidoc index ad0e1eb1170b4..4aa228dc14e65 100644 --- a/docs/reference/esql/functions/date_trunc.asciidoc +++ b/docs/reference/esql/functions/date_trunc.asciidoc @@ -1,13 +1,57 @@ [discrete] [[esql-date_trunc]] === `DATE_TRUNC` -Rounds down a date to the closest interval. Intervals can be expressed using the -<>. + +*Syntax* [source,esql] ---- -FROM employees -| EVAL year_hired = DATE_TRUNC(1 year, hire_date) -| STATS COUNT(emp_no) BY year_hired -| SORT year_hired +DATE_TRUNC(interval, date) +---- + +*Parameters* + +`interval`:: +Interval, expressed using the <>. If `null`, the function returns `null`. + +`date`:: +Date expression. If `null`, the function returns `null`. + +*Description* + +Rounds down a date to the closest interval. + +*Examples* + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateTrunc-result] +|=== + +Combine `DATE_TRUNC` with <> to create date histograms. For +example, the number of hires per year: + +[source.merge.styled,esql] +---- +include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram] +---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/date.csv-spec[tag=docsDateTruncHistogram-result] +|=== + +Or an hourly error rate: + +[source.merge.styled,esql] +---- +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate] ---- +[%header.monospaced.styled,format=dsv,separator=|] +|=== +include::{esql-specs}/conditional.csv-spec[tag=docsCaseHourlyErrorRate-result] +|=== diff --git a/docs/reference/mapping/types/flattened.asciidoc b/docs/reference/mapping/types/flattened.asciidoc index 87f5cebe21993..0a72ebc98ecef 100644 --- a/docs/reference/mapping/types/flattened.asciidoc +++ b/docs/reference/mapping/types/flattened.asciidoc @@ -294,8 +294,8 @@ The following mapping parameters are accepted: <>:: A string value which is substituted for any explicit `null` values within - the flattened object field. Defaults to `null`, which means null sields are - treated as if it were missing. + the flattened object field. Defaults to `null`, which means null fields are + treated as if they were missing. <>:: diff --git a/docs/reference/query-dsl/exists-query.asciidoc b/docs/reference/query-dsl/exists-query.asciidoc index 75d1b07ea3851..9a9f642daa3f4 100644 --- a/docs/reference/query-dsl/exists-query.asciidoc +++ b/docs/reference/query-dsl/exists-query.asciidoc @@ -9,7 +9,7 @@ Returns documents that contain an indexed value for a field. 
An indexed value may not exist for a document's field due to a variety of reasons:

 * The field in the source JSON is `null` or `[]`
-* The field has `"index" : false` set in the mapping
+* The field has `"index" : false` and `"doc_values" : false` set in the mapping
 * The length of the field value exceeded an `ignore_above` setting in the mapping
 * The field value was malformed and `ignore_malformed` was defined in the mapping
diff --git a/docs/reference/query-dsl/span-containing-query.asciidoc b/docs/reference/query-dsl/span-containing-query.asciidoc
index ec1c0bdf0a8d6..8a8eeba12a7b2 100644
--- a/docs/reference/query-dsl/span-containing-query.asciidoc
+++ b/docs/reference/query-dsl/span-containing-query.asciidoc
@@ -4,8 +4,7 @@
 Span containing
 ++++

-Returns matches which enclose another span query. The span containing
-query maps to Lucene `SpanContainingQuery`. Here is an example:
+Returns matches which enclose another span query. Here is an example:

 [source,console]
 --------------------------------------------------
diff --git a/docs/reference/query-dsl/span-field-masking-query.asciidoc b/docs/reference/query-dsl/span-field-masking-query.asciidoc
index 3a869f64b45f3..b0a9a0a1d6207 100644
--- a/docs/reference/query-dsl/span-field-masking-query.asciidoc
+++ b/docs/reference/query-dsl/span-field-masking-query.asciidoc
@@ -4,11 +4,11 @@
 Span field masking
 ++++

-Wrapper to allow span queries to participate in composite single-field span queries by 'lying' about their search field. The span field masking query maps to Lucene's `SpanFieldMaskingQuery`
+Wrapper to allow span queries to participate in composite single-field span queries by 'lying' about their search field.

 This can be used to support queries like `span-near` or `span-or` across different fields, which is not ordinarily permitted.

-Span field masking query is invaluable in conjunction with *multi-fields* when same content is indexed with multiple analyzers. For instance we could index a field with the standard analyzer which breaks text up into words, and again with the english analyzer which stems words into their root form.
+Span field masking query is invaluable in conjunction with *multi-fields* when the same content is indexed with multiple analyzers. For instance, we could index a field with the standard analyzer which breaks text up into words, and again with the english analyzer which stems words into their root form.

 Example:

@@ -28,18 +28,33 @@ GET /_search
         "span_field_masking": {
           "query": {
             "span_term": {
-              "text.stems": "fox"
+              "text.stems": "fox" <1>
             }
           },
-          "field": "text"
+          "field": "text" <2>
         }
       }
     ],
     "slop": 5,
     "in_order": false
   }
+  },
+  "highlight": {
+    "require_field_match" : false, <3>
+    "fields": {
+      "*": {}
+    }
   }
 }
--------------------------------------------------
+<1> Original field on which we do the search
+<2> Masked field, which we are masking with the original field
+<3> Use `"require_field_match" : false` to highlight the masked field
+
+Note: the `span_field_masking` query may have unexpected scoring and highlighting
+behaviour. This is because the query returns and highlights the masked field,
+but scoring and highlighting are done using the term statistics and offsets
+of the original field.

-Note: as span field masking query returns the masked field, scoring will be done using the norms of the field name supplied. This may lead to unexpected scoring behaviour.
+Note: for highlighting to work, the `require_field_match` parameter should
+be set to `false` on the highlighter.
diff --git a/docs/reference/query-dsl/span-first-query.asciidoc b/docs/reference/query-dsl/span-first-query.asciidoc index 77e3f557fd982..0b6d4ef80adfb 100644 --- a/docs/reference/query-dsl/span-first-query.asciidoc +++ b/docs/reference/query-dsl/span-first-query.asciidoc @@ -4,8 +4,7 @@ Span first ++++ -Matches spans near the beginning of a field. The span first query maps -to Lucene `SpanFirstQuery`. Here is an example: +Matches spans near the beginning of a field. Here is an example: [source,console] -------------------------------------------------- @@ -19,7 +18,7 @@ GET /_search "end": 3 } } -} +} -------------------------------------------------- The `match` clause can be any other span type query. The `end` controls diff --git a/docs/reference/query-dsl/span-near-query.asciidoc b/docs/reference/query-dsl/span-near-query.asciidoc index 0a1aa7082fbb2..1c68cfa12f72c 100644 --- a/docs/reference/query-dsl/span-near-query.asciidoc +++ b/docs/reference/query-dsl/span-near-query.asciidoc @@ -6,8 +6,7 @@ Matches spans which are near one another. One can specify _slop_, the maximum number of intervening unmatched positions, as well as whether -matches are required to be in-order. The span near query maps to Lucene -`SpanNearQuery`. Here is an example: +matches are required to be in-order. Here is an example: [source,console] -------------------------------------------------- diff --git a/docs/reference/query-dsl/span-not-query.asciidoc b/docs/reference/query-dsl/span-not-query.asciidoc index 99814eba9d88a..c1ddf00a7a939 100644 --- a/docs/reference/query-dsl/span-not-query.asciidoc +++ b/docs/reference/query-dsl/span-not-query.asciidoc @@ -6,8 +6,8 @@ Removes matches which overlap with another span query or which are within x tokens before (controlled by the parameter `pre`) or y tokens -after (controlled by the parameter `post`) another SpanQuery. The span not -query maps to Lucene `SpanNotQuery`. Here is an example: +after (controlled by the parameter `post`) another SpanQuery. +Here is an example: [source,console] -------------------------------------------------- diff --git a/docs/reference/query-dsl/span-or-query.asciidoc b/docs/reference/query-dsl/span-or-query.asciidoc index 6c0e78ab266d9..4ab12073c5d2c 100644 --- a/docs/reference/query-dsl/span-or-query.asciidoc +++ b/docs/reference/query-dsl/span-or-query.asciidoc @@ -4,8 +4,7 @@ Span or ++++ -Matches the union of its span clauses. The span or query maps to Lucene -`SpanOrQuery`. Here is an example: +Matches the union of its span clauses. Here is an example: [source,console] -------------------------------------------------- diff --git a/docs/reference/query-dsl/span-term-query.asciidoc b/docs/reference/query-dsl/span-term-query.asciidoc index 0dac73c9f7019..8e5e49d14e452 100644 --- a/docs/reference/query-dsl/span-term-query.asciidoc +++ b/docs/reference/query-dsl/span-term-query.asciidoc @@ -4,8 +4,7 @@ Span term ++++ -Matches spans containing a term. The span term query maps to Lucene -`SpanTermQuery`. Here is an example: +Matches spans containing a term. 
Here is an example:

 [source,console]
 --------------------------------------------------
@@ -14,7 +13,7 @@ GET /_search
   "query": {
     "span_term" : { "user.id" : "kimchy" }
   }
-}
+}
 --------------------------------------------------

 A boost can also be associated with the query:
@@ -26,7 +25,7 @@ GET /_search
   "query": {
     "span_term" : { "user.id" : { "value" : "kimchy", "boost" : 2.0 } }
   }
-}
+}
 --------------------------------------------------

 Or :
@@ -38,5 +37,5 @@ GET /_search
   "query": {
     "span_term" : { "user.id" : { "term" : "kimchy", "boost" : 2.0 } }
   }
-}
+}
 --------------------------------------------------
diff --git a/docs/reference/query-dsl/span-within-query.asciidoc b/docs/reference/query-dsl/span-within-query.asciidoc
index 62a12fc719613..0592e83117014 100644
--- a/docs/reference/query-dsl/span-within-query.asciidoc
+++ b/docs/reference/query-dsl/span-within-query.asciidoc
@@ -4,8 +4,8 @@
 Span within
 ++++

-Returns matches which are enclosed inside another span query. The span within
-query maps to Lucene `SpanWithinQuery`. Here is an example:
+Returns matches which are enclosed inside another span query.
+Here is an example:

 [source,console]
 --------------------------------------------------
diff --git a/docs/reference/query-dsl/text-expansion-query.asciidoc b/docs/reference/query-dsl/text-expansion-query.asciidoc
index e924cc05376d9..46a9aafdd1af8 100644
--- a/docs/reference/query-dsl/text-expansion-query.asciidoc
+++ b/docs/reference/query-dsl/text-expansion-query.asciidoc
@@ -52,10 +52,42 @@ text.
 (Required, string)
 The query text you want to use for search.

+`pruning_config` ::::
+(Optional, object)
+preview:[]
+Optional pruning configuration. If enabled, this will omit non-significant tokens from the query in order to improve query performance.
+Default: Disabled.
++
+--
+Parameters for `pruning_config` are:
+
+`tokens_freq_ratio_threshold`::
+(Optional, float)
+preview:[]
+Tokens whose frequency is more than `tokens_freq_ratio_threshold` times the average frequency of all tokens in the specified field are considered outliers and pruned.
+This value must be between 1 and 100.
+Default: `5`.
+
+`tokens_weight_threshold`::
+(Optional, float)
+preview:[]
+Tokens whose weight is less than `tokens_weight_threshold` are considered non-significant and pruned.
+This value must be between 0 and 1.
+Default: `0.4`.
+
+`only_score_pruned_tokens`::
+(Optional, boolean)
+preview:[]
+If `true`, we only input pruned tokens into scoring, and discard non-pruned tokens.
+It is strongly recommended to set this to `false` for the main query, but this can be set to `true` for a rescore query to get more relevant results.
+Default: `false`.
+
+NOTE: The default values for `tokens_freq_ratio_threshold` and `tokens_weight_threshold` were chosen based on tests using ELSER that provided the best results.
+--

 [discrete]
 [[text-expansion-query-example]]
-=== Example
+=== Example ELSER query

 The following is an example of the `text_expansion` query that references the
 ELSER model to perform semantic search. For a more detailed description of how
@@ -69,7 +101,7 @@ GET my-index/_search
   "query":{
      "text_expansion":{
         "ml.tokens":{
-            "model_id":".elser_model_1",
+            "model_id":".elser_model_2",
            "model_text":"How is the weather in Jamaica?"
} } @@ -78,7 +110,108 @@ GET my-index/_search ---- // TEST[skip: TBD] +[discrete] +[[text-expansion-query-with-pruning-config-example]] +=== Example ELSER query with pruning configuration + +The following is an extension to the above example that adds a preview:[] pruning configuration to the `text_expansion` query. +The pruning configuration identifies non-significant tokens to prune from the query in order to improve query performance. +[source,console] +---- +GET my-index/_search +{ + "query":{ + "text_expansion":{ + "ml.tokens":{ + "model_id":".elser_model_2", + "model_text":"How is the weather in Jamaica?" + }, + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": false + } + } + } +} +---- +// TEST[skip: TBD] + +[discrete] +[[text-expansion-query-with-pruning-config-and-rescore-example]] +=== Example ELSER query with pruning configuration and rescore + +The following is an extension to the above example that adds a <> function on top of the preview:[] pruning configuration to the `text_expansion` query. +The pruning configuration identifies non-significant tokens to prune from the query in order to improve query performance. +Rescoring the query with the tokens that were originally pruned from the query may improve overall search relevance when using this pruning strategy. + +[source,console] +---- +GET my-index/_search +{ + "query":{ + "text_expansion":{ + "ml.tokens":{ + "model_id":".elser_model_2", + "model_text":"How is the weather in Jamaica?" + }, + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": false + } + } + }, + "rescore": { + "window_size": 100, + "query": { + "rescore_query": { + "text_expansion": { + "ml.tokens": { + "model_id": ".elser_model_2", + "model_text": "How is the weather in Jamaica?" + }, + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": false + } + } + } + } + } +} +---- +//TEST[skip: TBD] + [NOTE] ==== Depending on your data, the text expansion query may be faster with `track_total_hits: false`. ==== + +[discrete] +[[weighted-tokens-query-example]] +=== Example Weighted token query + +In order to quickly iterate during tests, we exposed a new preview:[] `weighted_tokens` query for evaluation of tokenized datasets. +While this is not a query that is intended for production use, it can be used to quickly evaluate relevance using various pruning configurations. 
+ +[source,console] +---- +POST /docs/_search +{ + "query": { + "weighted_tokens": { + "query_expansion": { + "tokens": {"2161": 0.4679, "2621": 0.307, "2782": 0.1299, "2851": 0.1056, "3088": 0.3041, "3376": 0.1038, "3467": 0.4873, "3684": 0.8958, "4380": 0.334, "4542": 0.4636, "4633": 2.2805, "4785": 1.2628, "4860": 1.0655, "5133": 1.0709, "7139": 1.0016, "7224": 0.2486, "7387": 0.0985, "7394": 0.0542, "8915": 0.369, "9156": 2.8947, "10505": 0.2771, "11464": 0.3996, "13525": 0.0088, "14178": 0.8161, "16893": 0.1376, "17851": 1.5348, "19939": 0.6012}, + "pruning_config": { + "tokens_freq_ratio_threshold": 5, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": false + } + } + } + } +} +---- +//TEST[skip: TBD] diff --git a/docs/reference/release-notes.asciidoc b/docs/reference/release-notes.asciidoc index 340ef3a5c57c4..068cb3d2f127b 100644 --- a/docs/reference/release-notes.asciidoc +++ b/docs/reference/release-notes.asciidoc @@ -8,6 +8,7 @@ This section summarizes the changes in each release. * <> * <> +* <> * <> * <> * <> @@ -59,6 +60,7 @@ This section summarizes the changes in each release. include::release-notes/8.13.0.asciidoc[] include::release-notes/8.12.0.asciidoc[] +include::release-notes/8.11.3.asciidoc[] include::release-notes/8.11.2.asciidoc[] include::release-notes/8.11.1.asciidoc[] include::release-notes/8.11.0.asciidoc[] diff --git a/docs/reference/release-notes/8.11.2.asciidoc b/docs/reference/release-notes/8.11.2.asciidoc index ebf5db2e2505e..75987ce6139a6 100644 --- a/docs/reference/release-notes/8.11.2.asciidoc +++ b/docs/reference/release-notes/8.11.2.asciidoc @@ -8,6 +8,12 @@ Also see <>. === Known issues include::8.10.3.asciidoc[tag=no-preventive-gc-issue] +[float] +[[security-updates-8.11.2]] +=== Security updates + +* The 8.11.2 patch release contains a fix for a potential security vulnerability. https://discuss.elastic.co/c/announcements/security-announcements/31[Please see our security advisory for more details]. + [[bug-8.11.2]] [float] === Bug fixes diff --git a/docs/reference/release-notes/8.11.3.asciidoc b/docs/reference/release-notes/8.11.3.asciidoc new file mode 100644 index 0000000000000..ddeb50dad1f75 --- /dev/null +++ b/docs/reference/release-notes/8.11.3.asciidoc @@ -0,0 +1,28 @@ +[[release-notes-8.11.3]] +== {es} version 8.11.3 + +coming[8.11.3] + +Also see <>. 
+
+[[bug-8.11.3]]
+[float]
+=== Bug fixes
+
+Application::
+* Use latest version of entsearch ingestion pipeline {es-pull}103087[#103087]
+
+ES|QL::
+* Allow match field in enrich fields {es-pull}102734[#102734]
+* Collect warnings in compute service {es-pull}103031[#103031] (issues: {es-issue}100163[#100163], {es-issue}103028[#103028], {es-issue}102871[#102871], {es-issue}102982[#102982])
+
+ILM+SLM::
+* [ILM] More resilient when a policy is added to searchable snapshot {es-pull}102741[#102741] (issue: {es-issue}101958[#101958])
+
+Mapping::
+* Ensure `dynamicMapping` updates are handled in insertion order {es-pull}103047[#103047]
+
+Transform::
+* Ensure transform `_schedule_now` API only triggers the expected transform task {es-pull}102958[#102958] (issue: {es-issue}102956[#102956])
+
+
diff --git a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc
index fb6abd6d36099..16952f94890c7 100644
--- a/docs/reference/search/search-your-data/search-with-synonyms.asciidoc
+++ b/docs/reference/search/search-your-data/search-with-synonyms.asciidoc
@@ -75,6 +75,13 @@ A large number of inline synonyms increases cluster size unnecessarily and can l
 Once your synonyms sets are created, you can start configuring your token filters
 and analyzers to use them.

+[WARNING]
+======
+Synonyms sets must exist before they can be added to indices.
+If an index is created referencing a nonexistent synonyms set, the index will remain in a partially created and inoperable state.
+The only way to recover from this scenario is to ensure the synonyms set exists, then either delete and re-create the index, or close and re-open the index.
+======
+
 {es} uses synonyms as part of the <>.
 You can use two types of <> to include synonyms:
diff --git a/docs/reference/settings/security-settings.asciidoc b/docs/reference/settings/security-settings.asciidoc
index f4875fd096b00..78850f617ee65 100644
--- a/docs/reference/settings/security-settings.asciidoc
+++ b/docs/reference/settings/security-settings.asciidoc
@@ -71,6 +71,11 @@ the sensitive nature of the information.
 (<>) Enables fips mode of operation. Set this to `true` if you run this {es} instance in a FIPS 140-2 enabled JVM. For more information, see <>. Defaults to `false`.

+`xpack.security.fips_mode.required_providers`::
+(<>)
+Optionally enforce specific Java JCE/JSSE security providers. For example, set this to `["BCFIPS", "BCJSSE"]` (case-insensitive) to require
+the Bouncy Castle FIPS JCE and JSSE security providers. Only applicable when `xpack.security.fips_mode.enabled` is set to `true`.
+
 [discrete]
 [[password-hashing-settings]]
 ==== Password hashing settings
diff --git a/docs/reference/setup/secure-settings.asciidoc b/docs/reference/setup/secure-settings.asciidoc
index d51c0dd684871..22e828f96f5d2 100644
--- a/docs/reference/setup/secure-settings.asciidoc
+++ b/docs/reference/setup/secure-settings.asciidoc
@@ -6,11 +6,11 @@ their values is not sufficient.
 For this use case, {es} provides a keystore and the <> to manage
 the settings in the keystore.

-IMPORTANT: Only some settings are designed to be read from the keystore. However,
-the keystore has no validation to block unsupported settings. Adding unsupported
-settings to the keystore causes {es} to fail to start. To see whether a setting
-is supported in the keystore, look for a "Secure" qualifier in the setting
-reference.
+IMPORTANT: Only some settings are designed to be read from the keystore.
+Adding unsupported settings to the keystore causes the validation in the
+`_nodes/reload_secure_settings` API to fail and, if not addressed, will
+cause {es} to fail to start. To see whether a setting is supported in the
+keystore, look for a "Secure" qualifier in the setting reference.

 All the modifications to the keystore take effect only after restarting {es}.

@@ -42,12 +42,12 @@ POST _nodes/reload_secure_settings
 <1> The password that the {es} keystore is encrypted with.

-This API decrypts and re-reads the entire keystore, on every cluster node,
-but only the *reloadable* secure settings are applied. Changes to other
-settings do not go into effect until the next restart. Once the call returns,
-the reload has been completed, meaning that all internal data structures
-dependent on these settings have been changed. Everything should look as if the
-settings had the new value from the start.
+This API decrypts and re-reads the entire keystore and validates all settings on
+every cluster node, but only the *reloadable* secure settings are applied.
+Changes to other settings do not go into effect until the next restart. Once
+the call returns, the reload has been completed, meaning that all internal data
+structures dependent on these settings have been changed. Everything should
+look as if the settings had the new value from the start.

 When changing multiple *reloadable* secure settings, modify all of them on each
 cluster node, then issue a <>
diff --git a/docs/reference/synonyms/apis/delete-synonym-rule.asciidoc b/docs/reference/synonyms/apis/delete-synonym-rule.asciidoc
index eaff47f5d7909..74cbab8c0b4a2 100644
--- a/docs/reference/synonyms/apis/delete-synonym-rule.asciidoc
+++ b/docs/reference/synonyms/apis/delete-synonym-rule.asciidoc
@@ -1,8 +1,6 @@
 [[delete-synonym-rule]]
 === Delete synonym rule

-beta::[]
-
 ++++
 Delete synonym rule
 ++++
diff --git a/docs/reference/synonyms/apis/delete-synonyms-set.asciidoc b/docs/reference/synonyms/apis/delete-synonyms-set.asciidoc
index 6ba4dcdc8f7be..9ba33ff3a5c75 100644
--- a/docs/reference/synonyms/apis/delete-synonyms-set.asciidoc
+++ b/docs/reference/synonyms/apis/delete-synonyms-set.asciidoc
@@ -1,8 +1,6 @@
 [[delete-synonyms-set]]
 === Delete synonyms set

-beta::[]
-
 ++++
 Delete synonyms set
 ++++
diff --git a/docs/reference/synonyms/apis/get-synonym-rule.asciidoc b/docs/reference/synonyms/apis/get-synonym-rule.asciidoc
index 6ce978ae68ac6..c6c35e0efecca 100644
--- a/docs/reference/synonyms/apis/get-synonym-rule.asciidoc
+++ b/docs/reference/synonyms/apis/get-synonym-rule.asciidoc
@@ -1,8 +1,6 @@
 [[get-synonym-rule]]
 === Get synonym rule

-beta::[]
-
 ++++
 Get synonym rule
 ++++
diff --git a/docs/reference/synonyms/apis/get-synonyms-set.asciidoc b/docs/reference/synonyms/apis/get-synonyms-set.asciidoc
index ddd7d2079dbf5..70bb5fb69526d 100644
--- a/docs/reference/synonyms/apis/get-synonyms-set.asciidoc
+++ b/docs/reference/synonyms/apis/get-synonyms-set.asciidoc
@@ -1,8 +1,6 @@
 [[get-synonyms-set]]
 === Get synonyms set

-beta::[]
-
 ++++
 Get synonyms set
 ++++
diff --git a/docs/reference/synonyms/apis/list-synonyms-sets.asciidoc b/docs/reference/synonyms/apis/list-synonyms-sets.asciidoc
index 2522542886d9e..705a24c809e99 100644
--- a/docs/reference/synonyms/apis/list-synonyms-sets.asciidoc
+++ b/docs/reference/synonyms/apis/list-synonyms-sets.asciidoc
@@ -1,8 +1,6 @@
 [[list-synonyms-sets]]
 === List synonyms sets

-beta::[]
-
 ++++
 List synonyms sets
 ++++
diff --git a/docs/reference/synonyms/apis/put-synonym-rule.asciidoc
b/docs/reference/synonyms/apis/put-synonym-rule.asciidoc index 95492c95d36fe..de2865632d55e 100644 --- a/docs/reference/synonyms/apis/put-synonym-rule.asciidoc +++ b/docs/reference/synonyms/apis/put-synonym-rule.asciidoc @@ -1,8 +1,6 @@ [[put-synonym-rule]] === Create or update synonym rule -beta::[] - ++++ Create or update synonym rule ++++ diff --git a/docs/reference/synonyms/apis/put-synonyms-set.asciidoc b/docs/reference/synonyms/apis/put-synonyms-set.asciidoc index a3c06c70db17b..5651c4c99adcd 100644 --- a/docs/reference/synonyms/apis/put-synonyms-set.asciidoc +++ b/docs/reference/synonyms/apis/put-synonyms-set.asciidoc @@ -1,8 +1,6 @@ [[put-synonyms-set]] === Create or update synonyms set -beta::[] - ++++ Create or update synonyms set ++++ diff --git a/docs/reference/synonyms/apis/synonyms-apis.asciidoc b/docs/reference/synonyms/apis/synonyms-apis.asciidoc index 6849477177dcf..9b92ba8e8579d 100644 --- a/docs/reference/synonyms/apis/synonyms-apis.asciidoc +++ b/docs/reference/synonyms/apis/synonyms-apis.asciidoc @@ -1,8 +1,6 @@ [[synonyms-apis]] == Synonyms APIs -beta::[] - ++++ Synonyms APIs ++++ diff --git a/docs/reference/troubleshooting/network-timeouts.asciidoc b/docs/reference/troubleshooting/network-timeouts.asciidoc index ab60eeff1b1a9..1920dafe62210 100644 --- a/docs/reference/troubleshooting/network-timeouts.asciidoc +++ b/docs/reference/troubleshooting/network-timeouts.asciidoc @@ -34,9 +34,9 @@ end::troubleshooting-network-timeouts-packet-capture-fault-detection[] tag::troubleshooting-network-timeouts-threads[] * Long waits for particular threads to be available can be identified by taking -stack dumps (for example, using `jstack`) or a profiling trace (for example, -using Java Flight Recorder) in the few seconds leading up to the relevant log -message. +stack dumps of the main {es} process (for example, using `jstack`) or a +profiling trace (for example, using Java Flight Recorder) in the few seconds +leading up to the relevant log message. 
+
The <> API sometimes yields useful information, but bear in mind that
this API also requires a number of `transport_worker` and
diff --git a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
index 6df51189e918e..1d6df60df0f88 100644
--- a/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
+++ b/docs/src/yamlRestTest/java/org/elasticsearch/smoketest/DocsClientYamlTestSuiteIT.java
@@ -16,7 +16,6 @@
 import org.apache.http.util.EntityUtils;
 import org.apache.lucene.tests.util.TimeUnits;
 import org.apache.lucene.util.BytesRef;
-import org.elasticsearch.Version;
 import org.elasticsearch.client.Request;
 import org.elasticsearch.client.RestClient;
 import org.elasticsearch.common.settings.SecureString;
@@ -48,7 +47,6 @@
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
-import java.util.function.Predicate;

 import static java.util.Collections.emptyList;
 import static java.util.Collections.emptyMap;
@@ -98,20 +96,9 @@ protected boolean randomizeContentType() {
     protected ClientYamlTestClient initClientYamlTestClient(
         final ClientYamlSuiteRestSpec restSpec,
         final RestClient restClient,
-        final List hosts,
-        final Version esVersion,
-        final Predicate clusterFeaturesPredicate,
-        final String os
+        final List hosts
     ) {
-        return new ClientYamlDocsTestClient(
-            restSpec,
-            restClient,
-            hosts,
-            esVersion,
-            clusterFeaturesPredicate,
-            os,
-            this::getClientBuilderWithSniffedHosts
-        );
+        return new ClientYamlDocsTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts);
     }

     @Before
diff --git a/gradle/verification-metadata.xml b/gradle/verification-metadata.xml
index 263602c9841a8..185ddcf0606dc 100644
diff --git a/libs/cli/build.gradle b/libs/cli/build.gradle
index c12ae87ee65fe..dc045ba09e531 100644
--- a/libs/cli/build.gradle
+++ b/libs/cli/build.gradle
@@ -11,9 +11,12 @@ apply plugin: 'elasticsearch.publish'
 dependencies {
   api 'net.sf.jopt-simple:jopt-simple:5.0.2'
   api project(':libs:elasticsearch-core')
+
+  testImplementation(project(":test:framework")) {
+    exclude group: 'org.elasticsearch', module: 'elasticsearch-cli'
+  }
 }

-tasks.named("test").configure { enabled = false }
 // Since CLI does not depend on :server, it cannot run the jarHell task
 tasks.named("jarHell").configure { enabled = false }
diff --git a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java
index 856dfc6a5a078..69cb76636a996 100644
---
a/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java +++ b/libs/cli/src/main/java/org/elasticsearch/cli/Terminal.java @@ -18,6 +18,8 @@ import java.io.OutputStream; import java.io.PrintWriter; import java.io.Reader; +import java.lang.reflect.InvocationTargetException; +import java.lang.reflect.Method; import java.nio.charset.Charset; import java.util.Arrays; import java.util.Locale; @@ -274,8 +276,8 @@ public boolean isHeadless() { } private static class ConsoleTerminal extends Terminal { - - private static final Console CONSOLE = System.console(); + private static final int JDK_VERSION_WITH_IS_TERMINAL = 22; + private static final Console CONSOLE = detectTerminal(); ConsoleTerminal() { super(CONSOLE.reader(), CONSOLE.writer(), ERROR_WRITER); @@ -285,6 +287,23 @@ static boolean isSupported() { return CONSOLE != null; } + static Console detectTerminal() { + // JDK >= 22 returns a console even if the terminal is redirected unless using -Djdk.console=java.base + // https://bugs.openjdk.org/browse/JDK-8308591 + Console console = System.console(); + if (console != null && Runtime.version().feature() >= JDK_VERSION_WITH_IS_TERMINAL) { + try { + // verify the console is a terminal using isTerminal() on JDK >= 22 + // TODO: Remove reflection once Java 22 sources are supported, e.g. using a MRJAR + Method isTerminal = Console.class.getMethod("isTerminal"); + return Boolean.TRUE.equals(isTerminal.invoke(console)) ? console : null; + } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException e) { + throw new AssertionError(e); + } + } + return console; + } + @Override public String readText(String prompt) { return CONSOLE.readLine("%s", prompt); diff --git a/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java new file mode 100644 index 0000000000000..9c1faf911a829 --- /dev/null +++ b/libs/cli/src/test/java/org/elasticsearch/cli/TerminalTests.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.cli; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.ESTestCase.WithoutSecurityManager; + +@WithoutSecurityManager +public class TerminalTests extends ESTestCase { + + public void testSystemTerminalIfRedirected() { + // Expect system terminal if redirected for tests. + // To force new behavior in JDK 22 this should run without security manager. + // Otherwise, JDK 22 doesn't provide a console if redirected. 
+        assertEquals(Terminal.SystemTerminal.class, Terminal.DEFAULT.getClass());
+    }
+}
diff --git a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
index 4a166a03ecdf0..96d186dd612b0 100644
--- a/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
+++ b/libs/x-content/src/main/java/org/elasticsearch/xcontent/FilterXContentParser.java
@@ -236,7 +236,10 @@ public boolean isClosed() {

     @Override
     public void close() throws IOException {
-        delegate().close();
+        var closeable = delegate();
+        if (closeable != null) {
+            closeable.close();
+        }
     }

     @Override
diff --git a/libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java b/libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java
index 446fb21471961..f0703c626c583 100644
--- a/libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java
+++ b/libs/x-content/src/test/java/org/elasticsearch/xcontent/ConstructingObjectParserTests.java
@@ -102,22 +102,24 @@ public void testRandomOrder() throws Exception {
     }

     public void testMissingAllConstructorArgs() throws IOException {
-        XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"mineral\": 1 }");
-        ConstructingObjectParser objectParser = randomBoolean()
-            ? HasCtorArguments.PARSER
-            : HasCtorArguments.PARSER_VEGETABLE_OPTIONAL;
-        IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> objectParser.apply(parser, null));
-        if (objectParser == HasCtorArguments.PARSER) {
-            assertEquals("Required [animal, vegetable]", e.getMessage());
-        } else {
-            assertEquals("Required [animal]", e.getMessage());
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"mineral\": 1 }")) {
+            ConstructingObjectParser objectParser = randomBoolean()
+                ? HasCtorArguments.PARSER
+                : HasCtorArguments.PARSER_VEGETABLE_OPTIONAL;
+            IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> objectParser.apply(parser, null));
+            if (objectParser == HasCtorArguments.PARSER) {
+                assertEquals("Required [animal, vegetable]", e.getMessage());
+            } else {
+                assertEquals("Required [animal]", e.getMessage());
+            }
         }
     }

     public void testMissingAllConstructorArgsButNotRequired() throws IOException {
-        XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"mineral\": 1 }");
-        HasCtorArguments parsed = HasCtorArguments.PARSER_ALL_OPTIONAL.apply(parser, null);
-        assertEquals(1, parsed.mineral);
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"mineral\": 1 }")) {
+            HasCtorArguments parsed = HasCtorArguments.PARSER_ALL_OPTIONAL.apply(parser, null);
+            assertEquals(1, parsed.mineral);
+        }
     }

     public void testMissingSecondConstructorArg() throws IOException {
diff --git a/modules/apm/METERING.md b/modules/apm/METERING.md
new file mode 100644
index 0000000000000..0f5fcc977295d
--- /dev/null
+++ b/modules/apm/METERING.md
@@ -0,0 +1,142 @@
# Metrics in Elasticsearch

Elasticsearch's metrics API lives in the server package
`org.elasticsearch.telemetry.metric`.
This package contains the base classes/interfaces for creating and working with metrics.
Refer to the javadocs of the classes in that package for more details.
The entry point for working with metrics is `MeterRegistry`.
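For orientation, here is a minimal sketch of how a component might get hold of the
`MeterRegistry` and register an instrument once, at construction time. It assumes the
component is wired with the node's `TelemetryProvider` (for example via a plugin's
`createComponents`); treat the wiring, the class, and the metric name as illustrative
assumptions, not a prescribed pattern.

```java
import org.elasticsearch.telemetry.TelemetryProvider;
import org.elasticsearch.telemetry.metric.LongCounter;
import org.elasticsearch.telemetry.metric.MeterRegistry;

// Hypothetical component; only the MeterRegistry usage is the point here.
public class RequestMetrics {
    private final LongCounter requestCounter;

    public RequestMetrics(TelemetryProvider telemetryProvider) {
        MeterRegistry registry = telemetryProvider.getMeterRegistry();
        // Register instruments once and keep the returned handle around.
        this.requestCounter = registry.registerLongCounter("es.test.requests.total", "a test counter", "count");
    }

    public void onRequest() {
        requestCounter.increment();
    }
}
```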
## Implementation

We use Elastic's apm-java-agent as the implementation of the API we expose.
The implementation can be found in `:modules:apm`.
The apm-java-agent is responsible for buffering metrics and, on every metrics interval,
sending them over to the APM server.
The interval is configured via the `tracing.apm.agent.metrics_interval` setting.
The agent also collects a number of JVM metrics;
see https://www.elastic.co/guide/en/apm/agent/java/current/metrics.html#metrics-jvm


## How to choose an instrument

The choice of the right instrument is not always easy, as the differences are often subtle.
A simplified algorithm could be as follows:

1. You want to measure something (absolute value)
    1. values are non-additive
        1. use a gauge
        2. Example: a cpu temperature
    2. values are additive
        1. use an asynchronous counter
        2. Example: total number of requests
2. You want to count something
    1. values are monotonically increasing
        1. use a counter
        2. Example: recording a failed authentication count
    2. values can be decreased
        1. use an UpDownCounter
        2. Example: number of orders in a queue
3. You want to record statistics
    1. use a histogram
        1. Example: statistics about how long it took to access a value from the cache

Refer to https://opentelemetry.io/docs/specs/otel/metrics/supplementary-guidelines/#instrument-selection
for more details.

## How to name an instrument
See the naming guidelines for metrics:
[NAMING GUIDE](NAMING.md)

### Restarts and overflows
If the instrument is correctly chosen, the APM server will be able to determine whether the metrics
were restarted (i.e. the node was restarted) or there was a counter overflow
(the metric in ES might use an `int` internally, but the APM backend might use a `long`).

## How to use an instrument
There are two ways to use an instrument, depending on its type.
- For a synchronous instrument (counter/UpDownCounter) we need to register the instrument with
  `MeterRegistry` and use the returned value to increment the value of that instrument
```java
    MeterRegistry registry;
    LongCounter longCounter = registry.registerLongCounter("es.test.requests.count", "a test counter", "count");
    longCounter.increment();
    longCounter.incrementBy(1, Map.of("name", "Alice"));
    longCounter.incrementBy(1, Map.of("name", "Bob"));
```

- For an asynchronous instrument (gauge/AsynchronousCounter) we register the instrument
  and have to provide a callback that will report the absolute measured value.
  This callback has to be provided upon registration and cannot be changed.
```java
MeterRegistry registry;
long someValue = 1;
registry.registerLongGauge("es.test.cpu.temperature", "the current CPU temperature as measured by psensor", "degrees Celsius",
() -> new LongWithAttributes(someValue, Map.of("cpuNumber", 1)));
```

If we don't have access to the state that will be fetched on a metric event (when the callback is executed),
we can use the utility `LongGaugeMetric`:
```java
MeterRegistry meterRegistry;
LongGaugeMetric longGaugeMetric = LongGaugeMetric.create(meterRegistry, "es.test.gauge", "a test gauge", "total value");
longGaugeMetric.set(123L);
```

### The use of attributes aka dimensions
Each instrument can attach attributes to a reported value. This helps when drilling down into the details
of the value that was reported during the metric event.
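The remaining synchronous instruments follow the same registration-then-use pattern. A
minimal sketch, assuming `registerLongUpDownCounter` and `registerLongHistogram` mirror
the counter/gauge registration methods shown above (the method names and metric names
here are assumptions based on that convention):

```java
MeterRegistry registry; // obtained as above
// UpDownCounter: a value that can go up and down, e.g. items currently in a queue
LongUpDownCounter queueSize = registry.registerLongUpDownCounter("es.test.queue.size", "items currently queued", "count");
queueSize.add(1);  // enqueue
queueSize.add(-1); // dequeue
// Histogram: record individual observations, e.g. how long a cache lookup took
LongHistogram lookupTime = registry.registerLongHistogram("es.test.cache.lookup.time", "cache lookup latency", "ms");
lookupTime.record(42);
lookupTime.record(7, Map.of("cache", "shared")); // attributes attach dimensions, as described above
```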
## Development

### Mock http server

The quickest way to verify that your metrics are working is to run `./gradlew run --with-apm-server`.
This will run an ES node (or nodes in serverless) and also start a mock HTTP server that will act
as an APM server. This fake HTTP server will log all the HTTP messages it receives from the apm-agent.

### With APM server in cloud
You can also run a local ES node with an APM server in cloud.
Create a new deployment in cloud, then click the 'hamburger' on the left, scroll to Observability and click APM under it.
At the upper right corner there is an `Add data` link; scroll down to the `ApmAgents` section and pick Java.
There you should be able to see `elastic.apm.secret_token` and `elastic.apm.server_url`. You will use them in the next step.

Edit your `~/.gradle/init.d/apm.gradle` and replace the secret_token and the server_url.
```groovy
rootProject {
  if (project.name == 'elasticsearch' && Boolean.getBoolean('metrics.enabled')) {
    afterEvaluate {
      testClusters.matching { it.name == "runTask" }.configureEach {
        setting 'xpack.security.audit.enabled', 'true'
        keystore 'tracing.apm.secret_token', 'TODO-REPLACE'
        setting 'telemetry.metrics.enabled', 'true'
        setting 'tracing.apm.agent.server_url', 'https://TODO-REPLACE-URL.apm.eastus2.staging.azure.foundit.no:443'
      }
    }
  }
}
```

Example usage:
```
./gradlew :run -Dmetrics.enabled=true
```

#### Logging
Whichever approach you take to run your ES with APM, you will find an apm-agent.json file
in ES's logs directory. If there are any problems connecting to APM, you will see WARN/ERROR messages.
We run the apm-agent with logs at WARN level, so normally you should not see any logs there.

When running ES in cloud, logs are also indexed in a logging cluster, so you will be able to find them
in Kibana. The `datastream.dataset` is `elasticsearch.apm_agent`.


### Testing
We currently provide a base `TestTelemetryPlugin` which should help you write an integration test.
See `S3BlobStoreRepositoryTests` for an example.
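As a rough sketch of the shape such a test can take (the `TestTelemetryPlugin` accessor
used below is an assumption; check the plugin and `S3BlobStoreRepositoryTests` for the
actual API):

```java
import java.util.Collection;
import java.util.List;

import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.PluginsService;
import org.elasticsearch.telemetry.Measurement;
import org.elasticsearch.telemetry.TestTelemetryPlugin;
import org.elasticsearch.test.ESIntegTestCase;

// Hypothetical integration test; names flagged below are assumptions.
public class RequestMetricsIT extends ESIntegTestCase {

    @Override
    protected Collection<Class<? extends Plugin>> nodePlugins() {
        // Install the recording telemetry plugin alongside the code under test.
        return List.of(TestTelemetryPlugin.class);
    }

    public void testRequestCounterIsIncremented() {
        // ... exercise the code path that increments "es.test.requests.total" ...

        TestTelemetryPlugin telemetry = internalCluster().getInstance(PluginsService.class)
            .filterPlugins(TestTelemetryPlugin.class)
            .findFirst()
            .orElseThrow();
        // getLongCounterMeasurement is an assumed accessor for the recorded data points.
        List<Measurement> measurements = telemetry.getLongCounterMeasurement("es.test.requests.total");
        assertFalse(measurements.isEmpty());
    }
}
```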
+
+Example:
+* prefer `es.indices.docs.deleted.total` to `es.indices.total.deleted.docs`
+* This way you can later add `es.indices.docs.count`, `es.indices.docs.ingested.total`, etc.
+
+Prefix metrics:
+* Always use `es` as our root application name: this will give us a separate namespace, avoid any possibility of clashes with other metrics, and allow quick identification of Elasticsearch metrics on a dashboard.
+* Follow the root prefix with a simple module name, team or area of code. E.g. `snapshot, repositories, indices, threadpool`. Notice the mix of singular and plural - here this is intentional, to reflect closely the existing names in the codebase (e.g. `reindex` and `indices`).
+* In building a metric name, look for existing prefixes (e.g. module name and/or area of code, e.g. `blob_cache`) and for existing sub-elements as well (e.g. `error`) to build a good, consistent name. E.g. prefer the consistent use of `error.count` rather than introducing `failures`, `failed.count` or `errors`.
+* Avoid having sub-metrics under a name that is also a metric (e.g. do not create names like `es.repositories.elements`, `es.repositories.elements.utilization`; use `es.repositories.element.count` and `es.repositories.element.utilization` instead). Such metrics are hard to handle well in Elasticsearch, or in some internal structures (e.g. nested maps).
+
+Keep the hierarchy compact: do not add elements if you don’t need to. There is a description field when registering a metric; prefer using that as an explanation.
+For example, if emitting existing metrics from node stats, do not use the whole “object path”, but choose the most significant terms.
+
+The metric name can be generated, but there should be no dynamic or variable content in the name: that content belongs to a **dimension** (attributes/labels), as shown in the sketch after this list.
+
+* Node name, node id, cluster id, etc. are all considered dynamic content that belongs to attributes, not to the metric name.
+* When there are different "flavors" of a metric (i.e. `s3`, `azure`, etc.) use an attribute rather than inserting it in the metric name.
+* Rule of thumb: you should be able to do aggregations (e.g. sum, avg) across a dimension of a given metric (without the need to aggregate over different metric names); on the other hand, any aggregation across any dimension of a given metric should be meaningful.
+* There might be exceptions of course. For example:
+    * When similar metrics have significantly different implementations/related metrics.
+      If we have only common metrics like `es.repositories.element.count`, `es.repositories.element.utilization`, `es.repositories.writes.total` for every blob storage implementation, then `s3`/`azure` should be an attribute.
+      If we have specific metrics, e.g. for s3 storage classes, prefer using prefixed metric names for the specific metrics: `es.repositories.s3.deep_archive_access.total` (but keep `es.repositories.elements`)
+    * When you have a finite and fixed set of names it might be OK to have them in the name (e.g. "`young`" and "`old`" for GC generations).
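+
+A minimal sketch of the rule above (the metric name, description, and the `repo_type` attribute key
+are illustrative; the registration call mirrors the examples in METERING.md):
+```java
+MeterRegistry registry;
+// one metric name for every blob store flavor; the flavor is a dimension, not part of the name
+LongCounter writes = registry.registerLongCounter("es.repositories.writes.total", "repository write operations", "count");
+writes.incrementBy(1, Map.of("repo_type", "s3"));
+writes.incrementBy(1, Map.of("repo_type", "azure"));
+// a sum across the repo_type dimension yields a meaningful total over all repository types
+```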
+
+The metric name should NOT include its **unit**. Instead, the associated physical quantity should be added as a suffix, possibly following the general semantic names ([link](https://opentelemetry.io/docs/specs/semconv/general/metrics/#instrument-naming)).
+Examples:
+* es.process.jvm.collection.time instead of es.process.jvm.collection.seconds
+* es.process.mem.virtual.size, es.indices.storage.size (instead of es.process.mem.virtual.bytes, es.indices.storage.bytes)
+* In case `size` has a known upper limit, consider using `usage` (e.g.: es.process.jvm.heap.usage when there is a es.process.jvm.heap.limit)
+* es.indices.storage.write.io, instead of es.indices.storage.write.bytes_per_sec
+* These can all be composed with the suffixes below, e.g. es.process.jvm.collection.time.total, es.indices.storage.write.total to represent the monotonic sum of time spent in GC and the total number of bytes written to indices, respectively.
+
+**Pluralization** and **suffixes**:
+* If the metric is unit-less, use plural: `es.threadpool.activethreads`, `es.indices.docs`
+* Use `total` as a suffix for monotonic sums (e.g. es.indices.docs.deleted.total)
+* Use `count` to represent the count of "things" in the metric name/namespace (e.g. if we have `es.process.jvm.classes.loaded`, we will express the number of classes currently loaded by the JVM as es.process.jvm.classes.loaded.count, and the total number of classes loaded since the JVM started as es.process.jvm.classes.loaded.total)
+* Use `ratio` to represent the ratio of two measures with identical units (or unit-less), or measures that represent a fraction in the range [0, 1].
+    * Exception: consider using `utilization` when the ratio is between a usage and its limit, e.g. the ratio between es.process.jvm.heap.usage and es.process.jvm.heap.limit should be es.process.jvm.heap.utilization
+* If it has a unit of measure, then it should not be plural (and also not include the unit of measure, see above). Examples: es.process.jvm.collection.time, es.process.mem.virtual.usage, es.indices.storage.utilization
+
+### Attributes
+
+Attribute names should follow the same rules. In particular, these rules apply to attributes too:
+* elements and separators
+* hierarchy/namespaces
+* units
+* pluralization (when an attribute represents a measurement)
+
+For **pluralization**, when an attribute represents an entity, the attribute name should be singular (e.g. `es.security.realm_type`, not `es.security.realms_type` or `es.security.realm_types`), unless it represents a collection (e.g. `es.rest.request_headers`).
+
+
+### List of previously registered metric names
+You can inspect all previously registered metric names with
+`./gradlew run -Dtests.es.logger.org.elasticsearch.telemetry.apm=debug`
+This should help you find the already registered group that your metric
+might fit into.
diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java
index cd6d3d209b3ed..382fc9417eac0 100644
--- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java
+++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/APMMeterRegistry.java
@@ -10,6 +10,8 @@
 import io.opentelemetry.api.metrics.Meter;
 
+import org.apache.logging.log4j.LogManager;
+import org.apache.logging.log4j.Logger;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
 import org.elasticsearch.common.util.concurrent.ReleasableLock;
 import org.elasticsearch.telemetry.apm.internal.metrics.DoubleAsyncCounterAdapter;
@@ -47,6 +49,7 @@
  * {@link #setProvider(Meter)} is used to change the provider for all existing meterRegistrar.
*/ public class APMMeterRegistry implements MeterRegistry { + private static final Logger logger = LogManager.getLogger(APMMeterRegistry.class); private final Registrar doubleCounters = new Registrar<>(); private final Registrar doubleAsynchronousCounters = new Registrar<>(); private final Registrar doubleUpDownCounters = new Registrar<>(); @@ -207,6 +210,7 @@ public LongHistogram getLongHistogram(String name) { private > T register(Registrar registrar, T adapter) { assert registrars.contains(registrar) : "usage of unknown registrar"; + logger.debug("Registering an instrument with name: " + adapter.getName()); return registrar.register(adapter); } diff --git a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java index 49fdc44681aa3..667bcab08f2e4 100644 --- a/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java +++ b/modules/apm/src/main/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracer.java @@ -34,7 +34,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.tasks.Task; -import org.elasticsearch.telemetry.tracing.SpanId; +import org.elasticsearch.telemetry.tracing.Traceable; import java.security.AccessController; import java.security.PrivilegedAction; @@ -61,7 +61,7 @@ public class APMTracer extends AbstractLifecycleComponent implements org.elastic private static final Logger logger = LogManager.getLogger(APMTracer.class); /** Holds in-flight span information. */ - private final Map spans = ConcurrentCollections.newConcurrentMap(); + private final Map spans = ConcurrentCollections.newConcurrentMap(); private volatile boolean enabled; private volatile APMServices services; @@ -160,8 +160,9 @@ private void destroyApmServices() { } @Override - public void startTrace(ThreadContext threadContext, SpanId spanId, String spanName, @Nullable Map attributes) { + public void startTrace(ThreadContext threadContext, Traceable traceable, String spanName, @Nullable Map attributes) { assert threadContext != null; + String spanId = traceable.getSpanId(); assert spanId != null; assert spanName != null; @@ -276,12 +277,12 @@ private Context getParentContext(ThreadContext threadContext) { * However, if a scope is active, then the APM agent can capture additional information, so this method * exists to make it possible to use scopes in the few situation where it makes sense. * - * @param spanId the ID of a currently-open span for which to open a scope. + * @param traceable provides the ID of a currently-open span for which to open a scope. * @return a method to close the scope when you are finished with it. 
*/ @Override - public Releasable withScope(SpanId spanId) { - final Context context = spans.get(spanId); + public Releasable withScope(Traceable traceable) { + final Context context = spans.get(traceable.getSpanId()); if (context != null) { var scope = AccessController.doPrivileged((PrivilegedAction) context::makeCurrent); return scope::close; @@ -337,50 +338,50 @@ private void setSpanAttributes(ThreadContext threadContext, @Nullable Map) () -> { span.end(); return null; @@ -400,8 +401,8 @@ public void stopTrace() { } @Override - public void addEvent(SpanId spanId, String eventName) { - final var span = Span.fromContextOrNull(spans.get(spanId)); + public void addEvent(Traceable traceable, String eventName) { + final var span = Span.fromContextOrNull(spans.get(traceable.getSpanId())); if (span != null) { span.addEvent(eventName); } @@ -425,7 +426,7 @@ private static boolean isSupportedContextKey(String key) { } // VisibleForTesting - Map getSpans() { + Map getSpans() { return spans; } diff --git a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java index 8cb94b782756d..04a4e1b3f3a34 100644 --- a/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java +++ b/modules/apm/src/test/java/org/elasticsearch/telemetry/apm/internal/tracing/APMTracerTests.java @@ -22,13 +22,14 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.tasks.Task; import org.elasticsearch.telemetry.apm.internal.APMAgentSettings; -import org.elasticsearch.telemetry.tracing.SpanId; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.test.ESTestCase; import java.time.Instant; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.concurrent.TimeUnit; import java.util.stream.Stream; @@ -44,9 +45,9 @@ public class APMTracerTests extends ESTestCase { - private static final SpanId SPAN_ID1 = SpanId.forBareString("id1"); - private static final SpanId SPAN_ID2 = SpanId.forBareString("id2"); - private static final SpanId SPAN_ID3 = SpanId.forBareString("id3"); + private static final Traceable TRACEABLE1 = new TestTraceable("id1"); + private static final Traceable TRACEABLE2 = new TestTraceable("id2"); + private static final Traceable TRACEABLE3 = new TestTraceable("id3"); /** * Check that the tracer doesn't create spans when tracing is disabled. 
@@ -55,7 +56,7 @@ public void test_onTraceStarted_withTracingDisabled_doesNotStartTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), false).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -70,7 +71,7 @@ public void test_onTraceStarted_withSpanNameOmitted_doesNotStartTrace() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -82,10 +83,10 @@ public void test_onTraceStarted_startsTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), aMapWithSize(1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); } /** @@ -99,10 +100,10 @@ public void test_onTraceStartedWithStartTime_startsTrace() { // 1_000_000L because of "toNanos" conversions that overflow for large long millis Instant spanStartTime = Instant.ofEpochMilli(randomLongBetween(0, Long.MAX_VALUE / 1_000_000L)); threadContext.putTransient(Task.TRACE_START_TIME, spanStartTime); - apmTracer.startTrace(threadContext, SPAN_ID1, "name1", null); + apmTracer.startTrace(threadContext, TRACEABLE1, "name1", null); assertThat(apmTracer.getSpans(), aMapWithSize(1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); assertThat(((SpyAPMTracer) apmTracer).getSpanStartTime("name1"), is(spanStartTime)); } @@ -113,8 +114,8 @@ public void test_onTraceStopped_stopsTrace() { Settings settings = Settings.builder().put(APMAgentSettings.APM_ENABLED_SETTING.getKey(), true).build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name1", null); - apmTracer.stopTrace(SPAN_ID1); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name1", null); + apmTracer.stopTrace(TRACEABLE1); assertThat(apmTracer.getSpans(), anEmptyMap()); } @@ -131,7 +132,7 @@ public void test_whenTraceStarted_threadContextIsPopulated() { APMTracer apmTracer = buildTracer(settings); ThreadContext threadContext = new ThreadContext(settings); - apmTracer.startTrace(threadContext, SPAN_ID1, "name1", null); + apmTracer.startTrace(threadContext, TRACEABLE1, "name1", null); assertThat(threadContext.getTransient(Task.APM_TRACE_CONTEXT), notNullValue()); } @@ -152,13 +153,13 @@ public void test_whenTraceStarted_andSpanNameIncluded_thenSpanIsStarted() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID2, "name-bbb", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID3, "name-ccc", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE2, "name-bbb", null); + 
apmTracer.startTrace(new ThreadContext(settings), TRACEABLE3, "name-ccc", null); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID1)); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID2)); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID3))); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE1.getSpanId())); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE2.getSpanId())); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE3.getSpanId()))); } /** @@ -175,7 +176,7 @@ public void test_whenTraceStarted_andSpanNameIncludedAndExcluded_thenSpanIsNotSt .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); assertThat(apmTracer.getSpans(), not(hasKey("id1"))); } @@ -197,13 +198,13 @@ public void test_whenTraceStarted_andSpanNameExcluded_thenSpanIsNotStarted() { .build(); APMTracer apmTracer = buildTracer(settings); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID1, "name-aaa", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID2, "name-bbb", null); - apmTracer.startTrace(new ThreadContext(settings), SPAN_ID3, "name-ccc", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE1, "name-aaa", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE2, "name-bbb", null); + apmTracer.startTrace(new ThreadContext(settings), TRACEABLE3, "name-ccc", null); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID1))); - assertThat(apmTracer.getSpans(), not(hasKey(SPAN_ID2))); - assertThat(apmTracer.getSpans(), hasKey(SPAN_ID3)); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE1.getSpanId()))); + assertThat(apmTracer.getSpans(), not(hasKey(TRACEABLE2.getSpanId()))); + assertThat(apmTracer.getSpans(), hasKey(TRACEABLE3.getSpanId())); } /** @@ -360,4 +361,17 @@ public Span startSpan() { } } } + + private static class TestTraceable implements Traceable { + private final String spanId; + + TestTraceable(String spanId) { + this.spanId = Objects.requireNonNull(spanId); + } + + @Override + public String getSpanId() { + return spanId; + } + } } diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java index c3e59be54cc7f..321c5f46866f9 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/DataStreamsSnapshotsIT.java @@ -59,6 +59,7 @@ import java.util.stream.Collectors; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.anEmptyMap; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; @@ -161,9 +162,11 @@ public void testSnapshotAndRestore() throws Exception { assertEquals(1, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits 
= response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Response ds = client.execute( GetDataStreamAction.INSTANCE, @@ -219,9 +222,11 @@ public void testSnapshotAndRestoreAllDataStreamsInPlace() throws Exception { assertEquals(1, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Request getDataSteamRequest = new GetDataStreamAction.Request(new String[] { "*" }); GetDataStreamAction.Response ds = client.execute(GetDataStreamAction.INSTANCE, getDataSteamRequest).get(); @@ -271,9 +276,12 @@ public void testSnapshotAndRestoreInPlace() { assertEquals(1, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Request getDataSteamRequest = new GetDataStreamAction.Request(new String[] { "ds" }); GetDataStreamAction.Response ds = client.execute(GetDataStreamAction.INSTANCE, getDataSteamRequest).actionGet(); @@ -347,9 +355,11 @@ public void testSnapshotAndRestoreAllIncludeSpecificDataStream() throws Exceptio assertEquals(1, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(backingIndexName, idToGet).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch(backingIndexName).get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch(backingIndexName), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Response ds = client.execute( GetDataStreamAction.INSTANCE, @@ -396,9 +406,11 @@ public void testSnapshotAndRestoreReplaceAll() throws Exception { assertEquals(2, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Response ds = client.execute( GetDataStreamAction.INSTANCE, @@ -449,9 +461,11 @@ public void testSnapshotAndRestoreAll() throws Exception { assertEquals(2, restoreSnapshotResponse.getRestoreInfo().successfulShards()); 
assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Response ds = client.execute( GetDataStreamAction.INSTANCE, @@ -505,9 +519,11 @@ public void testSnapshotAndRestoreIncludeAliasesFalse() throws Exception { assertEquals(2, restoreSnapshotResponse.getRestoreInfo().successfulShards()); assertEquals(DOCUMENT_SOURCE, client.prepareGet(dsBackingIndexName, id).get().getSourceAsMap()); - SearchHit[] hits = client.prepareSearch("ds").get().getHits().getHits(); - assertEquals(1, hits.length); - assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + assertResponse(client.prepareSearch("ds"), response -> { + SearchHit[] hits = response.getHits().getHits(); + assertEquals(1, hits.length); + assertEquals(DOCUMENT_SOURCE, hits[0].getSourceAsMap()); + }); GetDataStreamAction.Response ds = client.execute( GetDataStreamAction.INSTANCE, @@ -557,7 +573,10 @@ public void testRename() throws Exception { assertEquals(1, ds.getDataStreams().size()); assertEquals(1, ds.getDataStreams().get(0).getDataStream().getIndices().size()); assertEquals(ds2BackingIndexName, ds.getDataStreams().get(0).getDataStream().getIndices().get(0).getName()); - assertEquals(DOCUMENT_SOURCE, client.prepareSearch("ds2").get().getHits().getHits()[0].getSourceAsMap()); + assertResponse( + client.prepareSearch("ds2"), + response -> assertEquals(DOCUMENT_SOURCE, response.getHits().getHits()[0].getSourceAsMap()) + ); assertEquals(DOCUMENT_SOURCE, client.prepareGet(ds2BackingIndexName, id).get().getSourceAsMap()); GetAliasesResponse getAliasesResponse = client.admin().indices().getAliases(new GetAliasesRequest("my-alias")).actionGet(); diff --git a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java index ab42d831c6545..fa4aa5920b83d 100644 --- a/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java +++ b/modules/data-streams/src/internalClusterTest/java/org/elasticsearch/datastreams/TSDBIndexingIT.java @@ -49,6 +49,7 @@ import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -400,11 +401,12 @@ public void testSkippingShards() throws Exception { var searchRequest = new SearchRequest("pattern-*"); searchRequest.setPreFilterShardSize(1); searchRequest.source(matchingRange); - var searchResponse = client().search(searchRequest).actionGet(); - ElasticsearchAssertions.assertHitCount(searchResponse, 2); - assertThat(searchResponse.getTotalShards(), equalTo(2)); - assertThat(searchResponse.getSkippedShards(), equalTo(0)); - assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); + assertResponse(client().search(searchRequest), searchResponse -> { + ElasticsearchAssertions.assertHitCount(searchResponse, 2); + 
assertThat(searchResponse.getTotalShards(), equalTo(2)); + assertThat(searchResponse.getSkippedShards(), equalTo(0)); + assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); + }); } { var nonMatchingRange = new SearchSourceBuilder().query( @@ -414,11 +416,12 @@ public void testSkippingShards() throws Exception { var searchRequest = new SearchRequest("pattern-*"); searchRequest.setPreFilterShardSize(1); searchRequest.source(nonMatchingRange); - var searchResponse = client().search(searchRequest).actionGet(); - ElasticsearchAssertions.assertNoSearchHits(searchResponse); - assertThat(searchResponse.getTotalShards(), equalTo(2)); - assertThat(searchResponse.getSkippedShards(), equalTo(1)); - assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); + assertResponse(client().search(searchRequest), searchResponse -> { + ElasticsearchAssertions.assertNoSearchHits(searchResponse); + assertThat(searchResponse.getTotalShards(), equalTo(2)); + assertThat(searchResponse.getSkippedShards(), equalTo(1)); + assertThat(searchResponse.getSuccessfulShards(), equalTo(2)); + }); } } @@ -536,17 +539,19 @@ public void testTrimId() throws Exception { ); // Check the search api can synthesize _id + final String idxName = indexName; var searchRequest = new SearchRequest(dataStreamName); searchRequest.source().trackTotalHits(true); - var searchResponse = client().search(searchRequest).actionGet(); - assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); - String id = searchResponse.getHits().getHits()[0].getId(); - assertThat(id, notNullValue()); - - // Check that the _id is gettable: - var getResponse = client().get(new GetRequest(indexName).id(id)).actionGet(); - assertThat(getResponse.isExists(), is(true)); - assertThat(getResponse.getId(), equalTo(id)); + assertResponse(client().search(searchRequest), searchResponse -> { + assertThat(searchResponse.getHits().getTotalHits().value, equalTo((long) numBulkRequests * numDocsPerBulk)); + String id = searchResponse.getHits().getHits()[0].getId(); + assertThat(id, notNullValue()); + + // Check that the _id is gettable: + var getResponse = client().get(new GetRequest(idxName).id(id)).actionGet(); + assertThat(getResponse.isExists(), is(true)); + assertThat(getResponse.getId(), equalTo(id)); + }); } static String formatInstant(Instant instant) { diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java index 7ec2d32851ea5..519499addd77e 100644 --- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java +++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamIndexSettingsProvider.java @@ -83,7 +83,7 @@ public Settings getAdditionalIndexSettings( if (indexMode != null) { if (indexMode == IndexMode.TIME_SERIES) { Settings.Builder builder = Settings.builder(); - TimeValue lookAheadTime = DataStreamsPlugin.LOOK_AHEAD_TIME.get(allSettings); + TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(allSettings); TimeValue lookBackTime = DataStreamsPlugin.LOOK_BACK_TIME.get(allSettings); final Instant start; final Instant end; diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java index fb93b7d688a74..f3739747d96cc 100644 --- 
a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/DataStreamsPlugin.java
@@ -90,15 +90,28 @@ public class DataStreamsPlugin extends Plugin implements ActionPlugin, HealthPlu
         Setting.Property.Dynamic
     );
+    private static final TimeValue MAX_LOOK_AHEAD_TIME = TimeValue.timeValueHours(2);
     public static final Setting LOOK_AHEAD_TIME = Setting.timeSetting(
         "index.look_ahead_time",
         TimeValue.timeValueHours(2),
         TimeValue.timeValueMinutes(1),
-        TimeValue.timeValueDays(7),
+        TimeValue.timeValueDays(7), // is effectively 2h now.
         Setting.Property.IndexScope,
         Setting.Property.Dynamic,
         Setting.Property.ServerlessPublic
     );
+
+    /**
+     * Returns the look ahead time and lowers it to 2 hours if it is configured to more than 2 hours.
+     */
+    public static TimeValue getLookAheadTime(Settings settings) {
+        TimeValue lookAheadTime = DataStreamsPlugin.LOOK_AHEAD_TIME.get(settings);
+        if (lookAheadTime.compareTo(DataStreamsPlugin.MAX_LOOK_AHEAD_TIME) > 0) {
+            lookAheadTime = DataStreamsPlugin.MAX_LOOK_AHEAD_TIME;
+        }
+        return lookAheadTime;
+    }
+
     public static final String LIFECYCLE_CUSTOM_INDEX_METADATA_KEY = "data_stream_lifecycle";
     public static final Setting LOOK_BACK_TIME = Setting.timeSetting(
         "index.look_back_time",
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeService.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeService.java
index f973eb95b39ce..3bbc37cd87ad5 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeService.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeService.java
@@ -108,7 +108,7 @@ ClusterState updateTimeSeriesTemporalRange(ClusterState current, Instant now) {
             Index head = dataStream.getWriteIndex();
             IndexMetadata im = current.metadata().getIndexSafe(head);
             Instant currentEnd = IndexSettings.TIME_SERIES_END_TIME.get(im.getSettings());
-            TimeValue lookAheadTime = DataStreamsPlugin.LOOK_AHEAD_TIME.get(im.getSettings());
+            TimeValue lookAheadTime = DataStreamsPlugin.getLookAheadTime(im.getSettings());
             Instant newEnd = DataStream.getCanonicalTimestampBound(
                 now.plus(lookAheadTime.getMillis(), ChronoUnit.MILLIS).plus(pollInterval.getMillis(), ChronoUnit.MILLIS)
             );
diff --git a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
index 029fd88abd9c6..2aa5c07ad6be5 100644
--- a/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
+++ b/modules/data-streams/src/main/java/org/elasticsearch/datastreams/action/DataStreamsStatsTransportAction.java
@@ -229,11 +229,11 @@ public DataStreamsStatsAction.Response newResponse(
             assert dataStream != null;
 
             // Aggregate global stats
-            totalStoreSizeBytes += shardStat.getStoreStats().sizeInBytes();
+            totalStoreSizeBytes += shardStat.getStoreStats().totalDataSetSizeInBytes();
 
             // Aggregate data stream stats
             AggregatedStats stats = aggregatedDataStreamsStats.computeIfAbsent(dataStream.getName(), s -> new AggregatedStats());
-            stats.storageBytes += shardStat.getStoreStats().sizeInBytes();
+            stats.storageBytes += shardStat.getStoreStats().totalDataSetSizeInBytes();
             stats.maxTimestamp = Math.max(stats.maxTimestamp,
shardStat.getMaxTimestamp()); } diff --git a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java index c383991dba19c..0b565d835465f 100644 --- a/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java +++ b/modules/data-streams/src/test/java/org/elasticsearch/datastreams/UpdateTimeSeriesRangeServiceTests.java @@ -94,7 +94,7 @@ public void testUpdateTimeSeriesTemporalRange() { } public void testUpdateTimeSeriesTemporalRange_customLookAHeadTime() { - int lookAHeadTimeMinutes = randomIntBetween(30, 180); + int lookAHeadTimeMinutes = randomIntBetween(30, 120); TemporalAmount lookAHeadTime = Duration.ofMinutes(lookAHeadTimeMinutes); int timeSeriesPollIntervalMinutes = randomIntBetween(1, 10); TemporalAmount timeSeriesPollInterval = Duration.ofMinutes(timeSeriesPollIntervalMinutes); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java index 273b76955060b..4f28c9bb14f80 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/AppendProcessor.java @@ -48,9 +48,10 @@ public ValueSource getValue() { } @Override - public IngestDocument execute(IngestDocument ingestDocument) throws Exception { - ingestDocument.appendFieldValue(field, value, allowDuplicates); - return ingestDocument; + public IngestDocument execute(IngestDocument document) throws Exception { + String path = document.renderTemplate(field); + document.appendFieldValue(path, value, allowDuplicates); + return document; } @Override diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java index 7b20cfbf0b398..3bf82be24330e 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/JsonProcessor.java @@ -8,8 +8,6 @@ package org.elasticsearch.ingest.common; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.core.Strings; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.ConfigurationUtils; @@ -20,7 +18,6 @@ import org.elasticsearch.xcontent.json.JsonXContent; import java.io.IOException; -import java.io.InputStream; import java.util.Locale; import java.util.Map; @@ -90,10 +87,11 @@ public ConflictStrategy getAddToRootConflictStrategy() { } public static Object apply(Object fieldValue, boolean allowDuplicateKeys, boolean strictJsonParsing) { - BytesReference bytesRef = fieldValue == null ? new BytesArray("null") : new BytesArray(fieldValue.toString()); try ( - InputStream stream = bytesRef.streamInput(); - XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, stream) + XContentParser parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + fieldValue == null ? 
"null" : fieldValue.toString() + ) ) { parser.allowDuplicateKeys(allowDuplicateKeys); XContentParser.Token token = parser.nextToken(); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java index c27bc4de85ec4..fa86bcda5047b 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java @@ -64,7 +64,7 @@ private void fieldsToRemoveProcessor(IngestDocument document) { } } else { for (TemplateScript.Factory field : fieldsToRemove) { - document.removeField(field); + document.removeField(document.renderTemplate(field)); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java index 84e66a3134b69..b629f00545aec 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java @@ -10,6 +10,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.ingest.AbstractProcessor; import org.elasticsearch.ingest.IngestDocument; @@ -25,7 +26,6 @@ import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; -import java.io.InputStream; import java.util.Arrays; import java.util.Map; @@ -108,9 +108,11 @@ public ScriptProcessor create( ) throws Exception { try ( XContentBuilder builder = XContentBuilder.builder(JsonXContent.jsonXContent).map(config); - InputStream stream = BytesReference.bytes(builder).streamInput(); - XContentParser parser = XContentType.JSON.xContent() - .createParser(XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), stream) + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + BytesReference.bytes(builder), + XContentType.JSON + ) ) { Script script = Script.parse(parser); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java index 229b796b89c75..2d7db39f3738e 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/SetProcessor.java @@ -78,12 +78,13 @@ public boolean isIgnoreEmptyValue() { @Override public IngestDocument execute(IngestDocument document) { - if (overrideEnabled || document.hasField(field) == false || document.getFieldValue(field, Object.class) == null) { + String path = document.renderTemplate(field); + if (overrideEnabled || document.hasField(path) == false || document.getFieldValue(path, Object.class) == null) { if (copyFrom != null) { Object fieldValue = document.getFieldValue(copyFrom, Object.class, ignoreEmptyValue); - document.setFieldValue(field, IngestDocument.deepCopy(fieldValue), ignoreEmptyValue); + document.setFieldValue(path, IngestDocument.deepCopy(fieldValue), ignoreEmptyValue); } else { - 
document.setFieldValue(field, value, ignoreEmptyValue); + document.setFieldValue(path, value, ignoreEmptyValue); } } return document; diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java index 1e40345208a1b..0b20fbc22e1cc 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java @@ -120,7 +120,7 @@ public void testMatchWithoutCaptures() throws Exception { public void testNullField() { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument doc = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - doc.setFieldValue(fieldName, null); + doc.setFieldValue(fieldName, (Object) null); GrokProcessor processor = new GrokProcessor( randomAlphaOfLength(10), null, @@ -138,7 +138,7 @@ public void testNullField() { public void testNullFieldWithIgnoreMissing() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - originalIngestDocument.setFieldValue(fieldName, null); + originalIngestDocument.setFieldValue(fieldName, (Object) null); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); GrokProcessor processor = new GrokProcessor( randomAlphaOfLength(10), diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index 1d10c30909906..f472e9d9bacd4 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -123,7 +123,7 @@ public void testRenameNewFieldAlreadyExists() throws Exception { public void testRenameExistingFieldNullValue() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - ingestDocument.setFieldValue(fieldName, null); + ingestDocument.setFieldValue(fieldName, (Object) null); String newFieldName = randomValueOtherThanMany(ingestDocument::hasField, () -> RandomDocumentPicks.randomFieldName(random())); Processor processor = createRenameProcessor(fieldName, newFieldName, false); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java index bd6a29181c657..09c5c58e3664d 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorFactoryTests.java @@ -167,4 +167,17 @@ public void testMediaType() throws Exception { ElasticsearchException e = expectThrows(ElasticsearchException.class, () -> factory.create(null, processorTag, null, config2)); assertThat(e.getMessage(), containsString("property does not contain a supported media type [" + expectedMediaType + "]")); } + + public void testCreateWithEmptyField() throws Exception { + // edge case: it's valid (according to the 
current validation) to *create* a set processor that has an empty string as its 'field'. + // it will fail at ingest execution time, but we don't reject it at pipeline creation time. + Map config = new HashMap<>(); + config.put("field", ""); + config.put("value", "value1"); + String processorTag = randomAlphaOfLength(10); + SetProcessor setProcessor = factory.create(null, processorTag, null, config); + assertThat(setProcessor.getTag(), equalTo(processorTag)); + assertThat(setProcessor.getField().newInstance(Map.of()).execute(), equalTo("")); + assertThat(setProcessor.getValue().copyAndResolve(Map.of()), equalTo("value1")); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java index 5973e4fe5741c..6cef9d3ecde8a 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/SetProcessorTests.java @@ -61,15 +61,11 @@ public void testSetFieldsTypeMismatch() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); ingestDocument.setFieldValue("field", "value"); Processor processor = createSetProcessor("field.inner", "value", null, true, false); - try { - processor.execute(ingestDocument); - fail("processor execute should have failed"); - } catch (IllegalArgumentException e) { - assertThat( - e.getMessage(), - equalTo("cannot set [inner] with parent object of type [java.lang.String] as " + "part of path [field.inner]") - ); - } + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); + assertThat( + exception.getMessage(), + equalTo("cannot set [inner] with parent object of type [java.lang.String] as part of path [field.inner]") + ); } public void testSetNewFieldWithOverrideDisabled() throws Exception { @@ -184,20 +180,6 @@ public void testCopyFromOtherField() throws Exception { } } - private static void assertMapEquals(Object actual, Object expected) { - if (expected instanceof Map expectedMap) { - Map actualMap = (Map) actual; - assertThat(actualMap.keySet().toArray(), arrayContainingInAnyOrder(expectedMap.keySet().toArray())); - for (Map.Entry entry : actualMap.entrySet()) { - if (entry.getValue() instanceof Map) { - assertMapEquals(entry.getValue(), expectedMap.get(entry.getKey())); - } else { - assertThat(entry.getValue(), equalTo(expectedMap.get(entry.getKey()))); - } - } - } - } - public void testCopyFromDeepCopiesNonPrimitiveMutableTypes() throws Exception { final String originalField = "originalField"; final String targetField = "targetField"; @@ -256,6 +238,15 @@ public void testCopyFromDeepCopiesNonPrimitiveMutableTypes() throws Exception { assertThat(ingestDocument.getFieldValue(targetField, Object.class), equalTo(preservedDate)); } + public void testSetEmptyField() { + // edge case: it's valid (according to the current validation) to *create* a set processor that has an empty string as its 'field', + // but it will fail at ingest execution time. 
+ IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); + Processor processor = createSetProcessor("", "some_value", null, false, false); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, () -> processor.execute(ingestDocument)); + assertThat(exception.getMessage(), equalTo("path cannot be null nor empty")); + } + private static Processor createSetProcessor( String fieldName, Object fieldValue, @@ -273,4 +264,18 @@ private static Processor createSetProcessor( ignoreEmptyValue ); } + + private static void assertMapEquals(Object actual, Object expected) { + if (expected instanceof Map expectedMap) { + Map actualMap = (Map) actual; + assertThat(actualMap.keySet().toArray(), arrayContainingInAnyOrder(expectedMap.keySet().toArray())); + for (Map.Entry entry : actualMap.entrySet()) { + if (entry.getValue() instanceof Map) { + assertMapEquals(entry.getValue(), expectedMap.get(entry.getKey())); + } else { + assertThat(entry.getValue(), equalTo(expectedMap.get(entry.getKey()))); + } + } + } + } } diff --git a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml index 594ff52c2b27a..f74e9a5752b80 100644 --- a/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml +++ b/modules/ingest-common/src/yamlRestTest/resources/rest-api-spec/test/ingest/270_set_processor.yml @@ -227,3 +227,71 @@ teardown: - match: { _source.foo: "hello" } - match: { _source.method_call_is_ignored: "" } - match: { _source.missing_method_is_ignored: "" } + +--- +"Test set processor with mustache edge cases": + - do: + ingest.put_pipeline: + id: "1" + body: > + { + "processors" : [ + { + "script": { + "description": "Set a field 'foo' with a value of '{{bar}}' -- no mustache here, just strings", + "lang": "painless", + "source": "ctx.foo = '{{bar}}'" + } + }, + { + "set": { + "description": "Dereference the foo field via actual mustache", + "field": "result_1", + "value": "{{foo}}" + } + }, + { + "set": { + "description": "Dereference the foo field via copy_from", + "field": "result_2", + "copy_from": "foo" + } + }, + { + "set": { + "description": "Copy the original bar value into old_bar", + "field": "old_bar", + "copy_from": "bar" + } + }, + { + "set": { + "description": "Set whatever field value_bar refers to (it's bar) to 3", + "field": "{{value_bar}}", + "value": 3 + } + } + ] + } + - match: { acknowledged: true } + + - do: + index: + index: test + id: "1" + pipeline: "1" + body: { + foo: 1, + bar: 2, + value_bar: "bar" + } + + - do: + get: + index: test + id: "1" + - match: { _source.foo: "{{bar}}" } + - match: { _source.result_1: "{{bar}}" } + - match: { _source.result_2: "{{bar}}" } + - match: { _source.old_bar: 2 } + - match: { _source.bar: 3 } diff --git a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java index ab2d96c7d198d..ec17915f7d622 100644 --- a/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java +++ b/modules/ingest-geoip/src/main/java/org/elasticsearch/ingest/geoip/DatabaseNodeService.java @@ -464,15 +464,19 @@ void retrieveDatabase( // so it is ok if this happens in a blocking manner on a thread from generic thread pool. // This makes the code easier to understand and maintain. 
SearchResponse searchResponse = client.search(searchRequest).actionGet(); - SearchHit[] hits = searchResponse.getHits().getHits(); + try { + SearchHit[] hits = searchResponse.getHits().getHits(); - if (searchResponse.getHits().getHits().length == 0) { - failureHandler.accept(new ResourceNotFoundException("chunk document with id [" + id + "] not found")); - return; + if (searchResponse.getHits().getHits().length == 0) { + failureHandler.accept(new ResourceNotFoundException("chunk document with id [" + id + "] not found")); + return; + } + byte[] data = (byte[]) hits[0].getSourceAsMap().get("data"); + md.update(data); + chunkConsumer.accept(data); + } finally { + searchResponse.decRef(); } - byte[] data = (byte[]) hits[0].getSourceAsMap().get("data"); - md.update(data); - chunkConsumer.accept(data); } String actualMd5 = MessageDigests.toHexString(md.digest()); if (Objects.equals(expectedMd5, actualMd5)) { diff --git a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java index 58089f792226a..c7dbee47ea823 100644 --- a/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java +++ b/modules/ingest-geoip/src/test/java/org/elasticsearch/ingest/geoip/DatabaseNodeServiceTests.java @@ -15,7 +15,6 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -38,6 +37,8 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedRunnable; +import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Releasables; import org.elasticsearch.core.Strings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; @@ -57,6 +58,7 @@ import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; +import org.mockito.stubbing.Answer; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; @@ -68,11 +70,13 @@ import java.security.MessageDigest; import java.util.ArrayList; import java.util.Arrays; +import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; +import java.util.concurrent.CopyOnWriteArrayList; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -109,6 +113,8 @@ public class DatabaseNodeServiceTests extends ESTestCase { private IngestService ingestService; private ClusterService clusterService; + private final Collection toRelease = new CopyOnWriteArrayList<>(); + @Before public void setup() throws IOException { final Path geoIpConfigDir = createTempDir(); @@ -133,6 +139,8 @@ public void setup() throws IOException { public void cleanup() { resourceWatcherService.close(); threadPool.shutdownNow(); + Releasables.close(toRelease); + toRelease.clear(); } public void testCheckDatabases() throws Exception { @@ -321,19 +329,14 @@ private String mockSearches(String databaseName, int firstChunk, int lastChunk) } SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f); - SearchResponse 
searchResponse = new SearchResponse( - new SearchResponseSections(hits, null, null, false, null, null, 0), - null, - 1, - 1, - 0, - 1L, - null, - null - ); + SearchResponse searchResponse = new SearchResponse(hits, null, null, false, null, null, 0, null, 1, 1, 0, 1L, null, null); + toRelease.add(searchResponse::decRef); @SuppressWarnings("unchecked") ActionFuture actionFuture = mock(ActionFuture.class); - when(actionFuture.actionGet()).thenReturn(searchResponse); + when(actionFuture.actionGet()).thenAnswer((Answer) invocation -> { + searchResponse.incRef(); + return searchResponse; + }); requestMap.put(databaseName + "_" + i, actionFuture); } when(client.search(any())).thenAnswer(invocationOnMock -> { diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/DeviceTypeParser.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/DeviceTypeParser.java index 40e54c8fe5f7e..6117ebc6aa319 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/DeviceTypeParser.java +++ b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/DeviceTypeParser.java @@ -39,23 +39,24 @@ public class DeviceTypeParser { private final HashMap> deviceTypePatterns = new HashMap<>(); public void init(InputStream regexStream) throws IOException { - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) - .createParser(XContentParserConfiguration.EMPTY, regexStream); - - XContentParser.Token token = yamlParser.nextToken(); - - if (token == XContentParser.Token.START_OBJECT) { - token = yamlParser.nextToken(); - - for (; token != null; token = yamlParser.nextToken()) { - String currentName = yamlParser.currentName(); - if (token == XContentParser.Token.FIELD_NAME && patternListKeys.contains(currentName)) { - List> parserConfigurations = readParserConfigurations(yamlParser); - ArrayList subPatterns = new ArrayList<>(); - for (Map map : parserConfigurations) { - subPatterns.add(new DeviceTypeSubPattern(Pattern.compile((map.get("regex"))), map.get("replacement"))); + try ( + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) + .createParser(XContentParserConfiguration.EMPTY, regexStream) + ) { + XContentParser.Token token = yamlParser.nextToken(); + if (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); + + for (; token != null; token = yamlParser.nextToken()) { + String currentName = yamlParser.currentName(); + if (token == XContentParser.Token.FIELD_NAME && patternListKeys.contains(currentName)) { + List> parserConfigurations = readParserConfigurations(yamlParser); + ArrayList subPatterns = new ArrayList<>(); + for (Map map : parserConfigurations) { + subPatterns.add(new DeviceTypeSubPattern(Pattern.compile((map.get("regex"))), map.get("replacement"))); + } + deviceTypePatterns.put(currentName, subPatterns); } - deviceTypePatterns.put(currentName, subPatterns); } } } diff --git a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java index 41ced0c7ff4cc..515c31735c313 100644 --- a/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java +++ b/modules/ingest-user-agent/src/main/java/org/elasticsearch/ingest/useragent/UserAgentParser.java @@ -48,59 +48,62 @@ final class UserAgentParser { private void init(InputStream regexStream) throws IOException { // EMPTY is safe here 
because we don't use namedObject - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) - .createParser(XContentParserConfiguration.EMPTY, regexStream); + try ( + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) + .createParser(XContentParserConfiguration.EMPTY, regexStream) + ) { - XContentParser.Token token = yamlParser.nextToken(); + XContentParser.Token token = yamlParser.nextToken(); - if (token == XContentParser.Token.START_OBJECT) { - token = yamlParser.nextToken(); + if (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); - for (; token != null; token = yamlParser.nextToken()) { - if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) { - List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); - - for (Map<String, String> map : parserConfigurations) { - uaPatterns.add( - new UserAgentSubpattern( - compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("family_replacement"), - map.get("v1_replacement"), - map.get("v2_replacement"), - map.get("v3_replacement"), - map.get("v4_replacement") - ) - ); - } - } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { - List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); - - for (Map<String, String> map : parserConfigurations) { - osPatterns.add( - new UserAgentSubpattern( - compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("os_replacement"), - map.get("os_v1_replacement"), - map.get("os_v2_replacement"), - map.get("os_v3_replacement"), - map.get("os_v4_replacement") - ) - ); - } - } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { - List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); - - for (Map<String, String> map : parserConfigurations) { - devicePatterns.add( - new UserAgentSubpattern( - compilePattern(map.get("regex"), map.get("regex_flag")), - map.get("device_replacement"), - null, - null, - null, - null - ) - ); + for (; token != null; token = yamlParser.nextToken()) { + if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("user_agent_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + uaPatterns.add( + new UserAgentSubpattern( + compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("family_replacement"), + map.get("v1_replacement"), + map.get("v2_replacement"), + map.get("v3_replacement"), + map.get("v4_replacement") + ) + ); + } + } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("os_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + osPatterns.add( + new UserAgentSubpattern( + compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("os_replacement"), + map.get("os_v1_replacement"), + map.get("os_v2_replacement"), + map.get("os_v3_replacement"), + map.get("os_v4_replacement") + ) + ); + } + } else if (token == XContentParser.Token.FIELD_NAME && yamlParser.currentName().equals("device_parsers")) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + + for (Map<String, String> map : parserConfigurations) { + devicePatterns.add( + new UserAgentSubpattern( + compilePattern(map.get("regex"), map.get("regex_flag")), + map.get("device_replacement"), + null, + null, + null, + null + ) + ); + } } } } diff --git
a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/DeviceTypeParserTests.java b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/DeviceTypeParserTests.java index 6543ef2095b87..582a40fb8a210 100644 --- a/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/DeviceTypeParserTests.java +++ b/modules/ingest-user-agent/src/test/java/org/elasticsearch/ingest/useragent/DeviceTypeParserTests.java @@ -31,36 +31,39 @@ public class DeviceTypeParserTests extends ESTestCase { private static DeviceTypeParser deviceTypeParser; private ArrayList<HashMap<String, String>> readTestDevices(InputStream regexStream, String keyName) throws IOException { - XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) - .createParser(XContentParserConfiguration.EMPTY, regexStream); + try ( + XContentParser yamlParser = XContentFactory.xContent(XContentType.YAML) + .createParser(XContentParserConfiguration.EMPTY, regexStream) + ) { - XContentParser.Token token = yamlParser.nextToken(); + XContentParser.Token token = yamlParser.nextToken(); - ArrayList<HashMap<String, String>> testDevices = new ArrayList<>(); + ArrayList<HashMap<String, String>> testDevices = new ArrayList<>(); - if (token == XContentParser.Token.START_OBJECT) { - token = yamlParser.nextToken(); + if (token == XContentParser.Token.START_OBJECT) { + token = yamlParser.nextToken(); - for (; token != null; token = yamlParser.nextToken()) { - String currentName = yamlParser.currentName(); - if (token == XContentParser.Token.FIELD_NAME && currentName.equals(keyName)) { - List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); + for (; token != null; token = yamlParser.nextToken()) { + String currentName = yamlParser.currentName(); + if (token == XContentParser.Token.FIELD_NAME && currentName.equals(keyName)) { + List<Map<String, String>> parserConfigurations = readParserConfigurations(yamlParser); - for (Map<String, String> map : parserConfigurations) { - HashMap<String, String> testDevice = new HashMap<>(); + for (Map<String, String> map : parserConfigurations) { + HashMap<String, String> testDevice = new HashMap<>(); - testDevice.put("type", map.get("type")); - testDevice.put("os", map.get("os")); - testDevice.put("browser", map.get("browser")); - testDevice.put("device", map.get("device")); - testDevices.add(testDevice); + testDevice.put("type", map.get("type")); + testDevice.put("os", map.get("os")); + testDevice.put("browser", map.get("browser")); + testDevice.put("device", map.get("device")); + testDevices.add(testDevice); + } } } } - } - return testDevices; + return testDevices; + } } private static VersionedName getVersionName(String name) { diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java index d9e346454aefe..0c3376c9c8a90 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/MultiSearchTemplateIT.java @@ -31,6 +31,7 @@ import java.util.Map; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; @@ -141,42 +142,43 @@ public void testBasic() throws Exception {
search5.setScriptParams(params5); multiRequest.add(search5); - MultiSearchTemplateResponse response = client().execute(MustachePlugin.MULTI_SEARCH_TEMPLATE_ACTION, multiRequest).get(); - assertThat(response.getResponses(), arrayWithSize(5)); - assertThat(response.getTook().millis(), greaterThan(0L)); - - MultiSearchTemplateResponse.Item response1 = response.getResponses()[0]; - assertThat(response1.isFailure(), is(false)); - SearchTemplateResponse searchTemplateResponse1 = response1.getResponse(); - assertThat(searchTemplateResponse1.hasResponse(), is(true)); - assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2)); - assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo(""" - {"query":{"match":{"odd":"true"}}}""")); - - MultiSearchTemplateResponse.Item response2 = response.getResponses()[1]; - assertThat(response2.isFailure(), is(false)); - SearchTemplateResponse searchTemplateResponse2 = response2.getResponse(); - assertThat(searchTemplateResponse2.hasResponse(), is(false)); - assertThat(searchTemplateResponse2.getSource().utf8ToString(), equalTo(""" - {"query":{"match_phrase_prefix":{"message":"quick brown f"}}}""")); - - MultiSearchTemplateResponse.Item response3 = response.getResponses()[2]; - assertThat(response3.isFailure(), is(false)); - SearchTemplateResponse searchTemplateResponse3 = response3.getResponse(); - assertThat(searchTemplateResponse3.hasResponse(), is(true)); - assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2)); - assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo(""" - {"query":{"term":{"odd":"false"}}}""")); - - MultiSearchTemplateResponse.Item response4 = response.getResponses()[3]; - assertThat(response4.isFailure(), is(true)); - assertThat(response4.getFailure(), instanceOf(IndexNotFoundException.class)); - assertThat(response4.getFailure().getMessage(), equalTo("no such index [unknown]")); - - MultiSearchTemplateResponse.Item response5 = response.getResponses()[4]; - assertThat(response5.isFailure(), is(true)); - assertNull(response5.getResponse()); - assertThat(response5.getFailure(), instanceOf(XContentParseException.class)); + assertResponse(client().execute(MustachePlugin.MULTI_SEARCH_TEMPLATE_ACTION, multiRequest), response -> { + assertThat(response.getResponses(), arrayWithSize(5)); + assertThat(response.getTook().millis(), greaterThan(0L)); + + MultiSearchTemplateResponse.Item response1 = response.getResponses()[0]; + assertThat(response1.isFailure(), is(false)); + SearchTemplateResponse searchTemplateResponse1 = response1.getResponse(); + assertThat(searchTemplateResponse1.hasResponse(), is(true)); + assertHitCount(searchTemplateResponse1.getResponse(), (numDocs / 2) + (numDocs % 2)); + assertThat(searchTemplateResponse1.getSource().utf8ToString(), equalTo(""" + {"query":{"match":{"odd":"true"}}}""")); + + MultiSearchTemplateResponse.Item response2 = response.getResponses()[1]; + assertThat(response2.isFailure(), is(false)); + SearchTemplateResponse searchTemplateResponse2 = response2.getResponse(); + assertThat(searchTemplateResponse2.hasResponse(), is(false)); + assertThat(searchTemplateResponse2.getSource().utf8ToString(), equalTo(""" + {"query":{"match_phrase_prefix":{"message":"quick brown f"}}}""")); + + MultiSearchTemplateResponse.Item response3 = response.getResponses()[2]; + assertThat(response3.isFailure(), is(false)); + SearchTemplateResponse searchTemplateResponse3 = response3.getResponse(); + assertThat(searchTemplateResponse3.hasResponse(), is(true)); + 
assertHitCount(searchTemplateResponse3.getResponse(), (numDocs / 2)); + assertThat(searchTemplateResponse3.getSource().utf8ToString(), equalTo(""" + {"query":{"term":{"odd":"false"}}}""")); + + MultiSearchTemplateResponse.Item response4 = response.getResponses()[3]; + assertThat(response4.isFailure(), is(true)); + assertThat(response4.getFailure(), instanceOf(IndexNotFoundException.class)); + assertThat(response4.getFailure().getMessage(), equalTo("no such index [unknown]")); + + MultiSearchTemplateResponse.Item response5 = response.getResponses()[4]; + assertThat(response5.isFailure(), is(true)); + assertNull(response5.getResponse()); + assertThat(response5.getFailure(), instanceOf(XContentParseException.class)); + }); } /** @@ -193,21 +195,24 @@ public void testCCSCheckCompatibility() throws Exception { searchTemplateRequest.setRequest(new SearchRequest()); MultiSearchTemplateRequest request = new MultiSearchTemplateRequest(); request.add(searchTemplateRequest); - MultiSearchTemplateResponse multiSearchTemplateResponse = client().execute(MustachePlugin.MULTI_SEARCH_TEMPLATE_ACTION, request) - .get(); - Item response = multiSearchTemplateResponse.getResponses()[0]; - assertTrue(response.isFailure()); - Exception ex = response.getFailure(); - assertThat(ex.getMessage(), containsString("[class org.elasticsearch.action.search.SearchRequest] is not compatible with version")); - assertThat(ex.getMessage(), containsString("'search.check_ccs_compatibility' setting is enabled.")); - - String expectedCause = Strings.format( - "[fail_before_current_version] was released first in version %s, failed compatibility " - + "check trying to send it to node with version %s", - FailBeforeCurrentVersionQueryBuilder.FUTURE_VERSION, - TransportVersions.MINIMUM_CCS_VERSION - ); - String actualCause = ex.getCause().getMessage(); - assertEquals(expectedCause, actualCause); + assertResponse(client().execute(MustachePlugin.MULTI_SEARCH_TEMPLATE_ACTION, request), multiSearchTemplateResponse -> { + Item response = multiSearchTemplateResponse.getResponses()[0]; + assertTrue(response.isFailure()); + Exception ex = response.getFailure(); + assertThat( + ex.getMessage(), + containsString("[class org.elasticsearch.action.search.SearchRequest] is not compatible with version") + ); + assertThat(ex.getMessage(), containsString("'search.check_ccs_compatibility' setting is enabled.")); + + String expectedCause = Strings.format( + "[fail_before_current_version] was released first in version %s, failed compatibility " + + "check trying to send it to node with version %s", + FailBeforeCurrentVersionQueryBuilder.FUTURE_VERSION, + TransportVersions.MINIMUM_CCS_VERSION + ); + String actualCause = ex.getCause().getMessage(); + assertEquals(expectedCause, actualCause); + }); } } diff --git a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java index 77480e6bc9e63..510ff01cf93f7 100644 --- a/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java +++ b/modules/lang-mustache/src/internalClusterTest/java/org/elasticsearch/script/mustache/SearchTemplateIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.search.DummyQueryParserPlugin; import org.elasticsearch.search.SearchService; import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import 
org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; @@ -32,7 +33,7 @@ import java.util.concurrent.ExecutionException; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; -import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertResponse; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -77,13 +78,13 @@ public void testSearchRequestFail() throws Exception { .get() ); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(searchRequest) - .setScript(query) - .setScriptType(ScriptType.INLINE) - .setScriptParams(Collections.singletonMap("my_size", 1)) - .get(); - - assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); + assertResponse( + new SearchTemplateRequestBuilder(client()).setRequest(searchRequest) + .setScript(query) + .setScriptType(ScriptType.INLINE) + .setScriptParams(Collections.singletonMap("my_size", 1)), + searchResponse -> assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)) + ); } /** @@ -101,8 +102,10 @@ public void testTemplateQueryAsEscapedString() throws Exception { }"""; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, query)); request.setRequest(searchRequest); - SearchTemplateResponse searchResponse = client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request).get(); - assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); + assertResponse( + client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request), + searchResponse -> assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)) + ); } /** @@ -122,8 +125,10 @@ public void testTemplateQueryAsEscapedStringStartingWithConditionalClause() thro }"""; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); - SearchTemplateResponse searchResponse = client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request).get(); - assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); + assertResponse( + client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request), + searchResponse -> assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)) + ); } /** @@ -143,8 +148,10 @@ public void testTemplateQueryAsEscapedStringWithConditionalClauseAtEnd() throws }"""; SearchTemplateRequest request = SearchTemplateRequest.fromXContent(createParser(JsonXContent.jsonXContent, templateString)); request.setRequest(searchRequest); - SearchTemplateResponse searchResponse = client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request).get(); - assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)); + assertResponse( + client().execute(MustachePlugin.SEARCH_TEMPLATE_ACTION, request), + searchResponse -> assertThat(searchResponse.getResponse().getHits().getHits().length, equalTo(1)) + ); } public void testIndexedTemplateClient() throws Exception { @@ -177,12 +184,13 @@ public void testIndexedTemplateClient() throws Exception { Map<String, Object> templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); -
SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) - .setScript("testTemplate") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 4); + assertHitCount( + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) + .setScript("testTemplate") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams), + 4 + ); assertAcked(clusterAdmin().prepareDeleteStoredScript("testTemplate")); @@ -273,13 +281,13 @@ public void testIndexedTemplate() throws Exception { Map<String, Object> templateParams = new HashMap<>(); templateParams.put("fieldParam", "foo"); - - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test")) - .setScript("1a") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 4); + assertHitCount( + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest().indices("test")) + .setScript("1a") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams), + 4 + ); expectThrows( ResourceNotFoundException.class, @@ -291,12 +299,13 @@ public void testIndexedTemplate() throws Exception { ); templateParams.put("fieldParam", "bar"); - searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) - .setScript("2") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 1); + assertHitCount( + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) + .setScript("2") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams), + 1 + ); } // Relates to #10397 @@ -352,13 +361,14 @@ public void testIndexedTemplateOverwrite() throws Exception { .setId("git01") .setContent(new BytesArray(query.replace("{{slop}}", Integer.toString(0))), XContentType.JSON) ); - - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex")) - .setScript("git01") - .setScriptType(ScriptType.STORED) - .setScriptParams(templateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 1); + assertHitCount( + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("testindex")) + .setScript("git01") + .setScriptType(ScriptType.STORED) + .setScriptParams(templateParams), + 1 + ); } } @@ -394,12 +404,13 @@ public void testIndexedTemplateWithArray() throws Exception { String[] fieldParams = { "foo", "bar" }; arrayTemplateParams.put("fieldParam", fieldParams); - SearchTemplateResponse searchResponse = new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) - .setScript("4") - .setScriptType(ScriptType.STORED) - .setScriptParams(arrayTemplateParams) - .get(); - assertHitCount(searchResponse.getResponse(), 5); + assertHitCount( + new SearchTemplateRequestBuilder(client()).setRequest(new SearchRequest("test")) + .setScript("4") + .setScriptType(ScriptType.STORED) + .setScriptParams(arrayTemplateParams), + 5 + ); } /** @@ -435,4 +446,8 @@ public void testCCSCheckCompatibility() throws Exception { String actualCause = underlying.getMessage().replaceAll("\\d{7,}", "XXXXXXX"); assertEquals(expectedCause, actualCause); } + + public static void assertHitCount(SearchTemplateRequestBuilder requestBuilder, long expectedHitCount) { +
assertResponse(requestBuilder, response -> ElasticsearchAssertions.assertHitCount(response.getResponse(), expectedHitCount)); + } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java index a26352eb3d8c7..9bdabcede8ec6 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponse.java @@ -18,8 +18,11 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ -99,6 +102,20 @@ public String toString() { private final Item[] items; private final long tookInMillis; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + for (int i = 0; i < items.length; i++) { + Item item = items[i]; + var r = item.response; + if (r != null) { + r.decRef(); + items[i] = null; + } + } + } + }); + MultiSearchTemplateResponse(StreamInput in) throws IOException { super(in); items = in.readArray(Item::new, Item[]::new); @@ -162,6 +179,26 @@ public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params par return builder; } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + static final class Fields { static final String RESPONSES = "responses"; static final String STATUS = "status"; @@ -179,6 +216,7 @@ public static MultiSearchTemplateResponse fromXContext(XContentParser parser) { if (item.getResponse() != null) { stResponse = new SearchTemplateResponse(); stResponse.setResponse(item.getResponse()); + item.getResponse().incRef(); } templateResponses[i++] = new Item(stResponse, item.getFailure()); } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java index b4b804bf22e92..9451ac089476e 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/SearchTemplateResponse.java @@ -14,7 +14,10 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContent; +import org.elasticsearch.core.AbstractRefCounted; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.transport.LeakTracker; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; @@ 
-35,6 +38,15 @@ public class SearchTemplateResponse extends ActionResponse implements ToXContent /** Contains the search response, if any **/ private SearchResponse response; + private final RefCounted refCounted = LeakTracker.wrap(new AbstractRefCounted() { + @Override + protected void closeInternal() { + if (response != null) { + response.decRef(); + } + } + }); + SearchTemplateResponse() {} SearchTemplateResponse(StreamInput in) throws IOException { @@ -74,6 +86,26 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(response); } + @Override + public void incRef() { + refCounted.incRef(); + } + + @Override + public boolean tryIncRef() { + return refCounted.tryIncRef(); + } + + @Override + public boolean decRef() { + return refCounted.decRef(); + } + + @Override + public boolean hasReferences() { + return refCounted.hasReferences(); + } + public static SearchTemplateResponse fromXContent(XContentParser parser) throws IOException { SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); Map<String, Object> contentAsMap = parser.map(); @@ -85,11 +117,12 @@ public static SearchTemplateResponse fromXContent(XContentParser parser) throws } else { XContentType contentType = parser.contentType(); XContentBuilder builder = XContentFactory.contentBuilder(contentType).map(contentAsMap); - XContentParser searchResponseParser = contentType.xContent() - .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), BytesReference.bytes(builder).streamInput()); - - SearchResponse searchResponse = SearchResponse.fromXContent(searchResponseParser); - searchTemplateResponse.setResponse(searchResponse); + try ( + XContentParser searchResponseParser = contentType.xContent() + .createParser(parser.getXContentRegistry(), parser.getDeprecationHandler(), BytesReference.bytes(builder).streamInput()) + ) { + searchTemplateResponse.setResponse(SearchResponse.fromXContent(searchResponseParser)); + } } return searchTemplateResponse; } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java index 4b0c365ba8b13..11871978e433a 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportMultiSearchTemplateAction.java @@ -80,6 +80,7 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi try { searchRequest = convert(searchTemplateRequest, searchTemplateResponse, scriptService, xContentRegistry, searchUsageHolder); } catch (Exception e) { + searchTemplateResponse.decRef(); items[i] = new MultiSearchTemplateResponse.Item(null, e); if (ExceptionsHelper.status(e).getStatus() >= 500 && ExceptionsHelper.isNodeOrShardUnavailableTypeException(e) == false) { logger.warn("MultiSearchTemplate convert failure", e); @@ -98,12 +99,17 @@ protected void doExecute(Task task, MultiSearchTemplateRequest request, ActionLi MultiSearchResponse.Item item = r.getResponses()[i]; int originalSlot = originalSlots.get(i); if (item.isFailure()) { + var existing = items[originalSlot]; + if (existing.getResponse() != null) { + existing.getResponse().decRef(); + } items[originalSlot] = new MultiSearchTemplateResponse.Item(null, item.getFailure()); } else { items[originalSlot].getResponse().setResponse(item.getResponse()); +
item.getResponse().incRef(); } } - l.onResponse(new MultiSearchTemplateResponse(items, r.getTook().millis())); + ActionListener.respondAndRelease(l, new MultiSearchTemplateResponse(items, r.getTook().millis())); })); } } diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java index 2b315f48dcce4..c6bd2afc64d21 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/TransportSearchTemplateAction.java @@ -71,18 +71,29 @@ public TransportSearchTemplateAction( @Override protected void doExecute(Task task, SearchTemplateRequest request, ActionListener<SearchTemplateResponse> listener) { final SearchTemplateResponse response = new SearchTemplateResponse(); + boolean success = false; try { SearchRequest searchRequest = convert(request, response, scriptService, xContentRegistry, searchUsageHolder); if (searchRequest != null) { - client.search(searchRequest, listener.delegateFailureAndWrap((l, searchResponse) -> { + client.search(searchRequest, listener.delegateResponse((l, e) -> { + response.decRef(); + l.onFailure(e); + }).delegateFailureAndWrap((l, searchResponse) -> { response.setResponse(searchResponse); - l.onResponse(response); + searchResponse.incRef(); + ActionListener.respondAndRelease(l, response); })); + success = true; } else { - listener.onResponse(response); + success = true; + ActionListener.respondAndRelease(listener, response); } } catch (IOException e) { listener.onFailure(e); + } finally { + if (success == false) { + response.decRef(); + } } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java index 3db0d12216e54..03f2fbd3e81a7 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/MultiSearchTemplateResponseTests.java @@ -11,7 +11,8 @@ import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Strings; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; @@ -38,11 +39,9 @@ protected MultiSearchTemplateResponse createTestInstance() { int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = totalShards - successfulShards; - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; SearchResponse.Clusters clusters = randomClusters(); SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -75,11 +74,9 @@ private static MultiSearchTemplateResponse createTestInstanceWithFailures() { int totalShards = randomIntBetween(1,
Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = totalShards - successfulShards; - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; SearchResponse.Clusters clusters = randomClusters(); SearchTemplateResponse searchTemplateResponse = new SearchTemplateResponse(); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -150,7 +147,13 @@ public void testFromXContentWithFailures() throws IOException { this::doParseInstance, this::assertEqualInstances, assertToXContentEquivalence, - ToXContent.EMPTY_PARAMS + ToXContent.EMPTY_PARAMS, + RefCounted::decRef ); } + + @Override + protected void dispose(MultiSearchTemplateResponse instance) { + instance.decRef(); + } } diff --git a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java index d3f23d3f4a21c..73c8887669a02 100644 --- a/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java +++ b/modules/lang-mustache/src/test/java/org/elasticsearch/script/mustache/SearchTemplateResponseTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractXContentTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -54,10 +54,8 @@ private static SearchResponse createSearchResponse() { int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, totalShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; - return new SearchResponse( - internalSearchResponse, + return SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -127,33 +125,36 @@ protected boolean supportsUnknownFields() { public void testSourceToXContent() throws IOException { SearchTemplateResponse response = new SearchTemplateResponse(); + try { + XContentBuilder source = XContentFactory.jsonBuilder() + .startObject() + .startObject("query") + .startObject("terms") + .field("status", new String[] { "pending", "published" }) + .endObject() + .endObject() + .endObject(); + response.setSource(BytesReference.bytes(source)); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .startObject("template_output") + .startObject("query") + .startObject("terms") + .field("status", new String[] { "pending", "published" }) + .endObject() + .endObject() + .endObject() + .endObject(); - XContentBuilder source = XContentFactory.jsonBuilder() - .startObject() - .startObject("query") - .startObject("terms") - .field("status", new String[] { "pending", "published" }) - .endObject() - .endObject() - .endObject(); - response.setSource(BytesReference.bytes(source)); - - XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = 
XContentFactory.contentBuilder(contentType) - .startObject() - .startObject("template_output") - .startObject("query") - .startObject("terms") - .field("status", new String[] { "pending", "published" }) - .endObject() - .endObject() - .endObject() - .endObject(); - - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); - response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); - - assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + } finally { + response.decRef(); + } } public void testSearchResponseToXContent() throws IOException { @@ -161,17 +162,14 @@ public void testSearchResponseToXContent() throws IOException { hit.score(2.0f); SearchHit[] hits = new SearchHit[] { hit }; - InternalSearchResponse internalSearchResponse = new InternalSearchResponse( + SearchResponse searchResponse = new SearchResponse( new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f), null, null, - null, false, null, - 1 - ); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + null, + 1, null, 0, 0, @@ -182,37 +180,46 @@ public void testSearchResponseToXContent() throws IOException { ); SearchTemplateResponse response = new SearchTemplateResponse(); - response.setResponse(searchResponse); - - XContentType contentType = randomFrom(XContentType.values()); - XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) - .startObject() - .field("took", 0) - .field("timed_out", false) - .startObject("_shards") - .field("total", 0) - .field("successful", 0) - .field("skipped", 0) - .field("failed", 0) - .endObject() - .startObject("hits") - .startObject("total") - .field("value", 100) - .field("relation", "eq") - .endObject() - .field("max_score", 1.5F) - .startArray("hits") - .startObject() - .field("_id", "id") - .field("_score", 2.0F) - .endObject() - .endArray() - .endObject() - .endObject(); - - XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); - response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); - - assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + try { + response.setResponse(searchResponse); + + XContentType contentType = randomFrom(XContentType.values()); + XContentBuilder expectedResponse = XContentFactory.contentBuilder(contentType) + .startObject() + .field("took", 0) + .field("timed_out", false) + .startObject("_shards") + .field("total", 0) + .field("successful", 0) + .field("skipped", 0) + .field("failed", 0) + .endObject() + .startObject("hits") + .startObject("total") + .field("value", 100) + .field("relation", "eq") + .endObject() + .field("max_score", 1.5F) + .startArray("hits") + .startObject() + .field("_id", "id") + .field("_score", 2.0F) + .endObject() + .endArray() + .endObject() + .endObject(); + + XContentBuilder actualResponse = XContentFactory.contentBuilder(contentType); + response.toXContent(actualResponse, ToXContent.EMPTY_PARAMS); + + assertToXContentEquivalent(BytesReference.bytes(expectedResponse), BytesReference.bytes(actualResponse), contentType); + } finally { + response.decRef(); + } + } + + @Override + protected void 
dispose(SearchTemplateResponse instance) { instance.decRef(); } } diff --git a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextGeneratorCommon.java b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextGeneratorCommon.java index c54214e5f854d..b8390f6aab75c 100644 --- a/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextGeneratorCommon.java +++ b/modules/lang-painless/src/doc/java/org/elasticsearch/painless/ContextGeneratorCommon.java @@ -34,15 +34,21 @@ public class ContextGeneratorCommon { @SuppressForbidden(reason = "retrieving data from an internal API not exposed as part of the REST client") + @SuppressWarnings("unchecked") public static List<PainlessContextInfo> getContextInfos() throws IOException { URLConnection getContextNames = new URL("http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context") .openConnection(); - XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, getContextNames.getInputStream()); - parser.nextToken(); - parser.nextToken(); - @SuppressWarnings("unchecked") - List<String> contextNames = (List<String>) (Object) parser.list(); - parser.close(); + List<String> contextNames; + try ( + XContentParser parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + getContextNames.getInputStream() + ) + ) { + parser.nextToken(); + parser.nextToken(); + contextNames = (List<String>) (Object) parser.list(); + } ((HttpURLConnection) getContextNames).disconnect(); List<PainlessContextInfo> contextInfos = new ArrayList<>(); @@ -51,9 +57,10 @@ public static List<PainlessContextInfo> getContextInfos() throws IOException { URLConnection getContextInfo = new URL( "http://" + System.getProperty("cluster.uri") + "/_scripts/painless/_context?context=" + contextName ).openConnection(); - parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, getContextInfo.getInputStream()); - contextInfos.add(PainlessContextInfo.fromXContent(parser)); - ((HttpURLConnection) getContextInfo).disconnect(); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, getContextInfo.getInputStream())) { + contextInfos.add(PainlessContextInfo.fromXContent(parser)); + ((HttpURLConnection) getContextInfo).disconnect(); + } } contextInfos.sort(Comparator.comparing(PainlessContextInfo::getName)); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index b7c790fe1d0ad..7b84e3c9f1417 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -13,6 +13,7 @@ import org.elasticsearch.painless.Compiler.Loader; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.painless.symbol.ScriptScope; import org.elasticsearch.script.ScriptContext; @@ -84,9 +85,11 @@ public PainlessScriptEngine(Settings settings, Map<ScriptContext<?>, List<Whitelist>> contexts) { Map<ScriptContext<?>, Compiler> mutableContextsToCompilers = new HashMap<>(); Map<ScriptContext<?>, PainlessLookup> mutableContextsToLookups = new HashMap<>(); + final Map<Object, Object> dedup = new HashMap<>(); + final Map<PainlessMethod, PainlessMethod> filteredMethodCache = new HashMap<>(); for (Map.Entry<ScriptContext<?>, List<Whitelist>> entry : contexts.entrySet()) { ScriptContext<?> context = entry.getKey(); - PainlessLookup
lookup = PainlessLookupBuilder.buildFromWhitelists(entry.getValue()); + PainlessLookup lookup = PainlessLookupBuilder.buildFromWhitelists(entry.getValue(), dedup, filteredMethodCache); mutableContextsToCompilers.put( context, new Compiler(context.instanceClazz, context.factoryClazz, context.statefulFactoryClazz, lookup) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Json.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Json.java index b2993d6169336..f121894cf4dc5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Json.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/api/Json.java @@ -20,16 +20,16 @@ public class Json { * Load a string as the Java version of a JSON type, either List (JSON array), Map (JSON object), Number, Boolean or String */ public static Object load(String json) throws IOException { - XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json); - - return switch (parser.nextToken()) { - case START_ARRAY -> parser.list(); - case START_OBJECT -> parser.map(); - case VALUE_NUMBER -> parser.numberValue(); - case VALUE_BOOLEAN -> parser.booleanValue(); - case VALUE_STRING -> parser.text(); - default -> null; - }; + try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) { + return switch (parser.nextToken()) { + case START_ARRAY -> parser.list(); + case START_OBJECT -> parser.map(); + case VALUE_NUMBER -> parser.numberValue(); + case VALUE_BOOLEAN -> parser.booleanValue(); + case VALUE_STRING -> parser.text(); + default -> null; + }; + } } /** diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index d32639bf3968f..e6f7c1a3bb617 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -49,7 +49,7 @@ public final class PainlessClass { this.getterMethodHandles = Map.copyOf(getterMethodHandles); this.setterMethodHandles = Map.copyOf(setterMethodHandles); - this.runtimeMethods = Map.copyOf(runtimeMethods); + this.runtimeMethods = runtimeMethods.equals(methods) ? 
this.methods : Map.copyOf(runtimeMethods); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index c775ba4b5b9b6..5bf8e5cde2afb 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -163,18 +163,6 @@ public PainlessMethod lookupPainlessMethod(Class<?> targetClass, boolean isStati return lookupPainlessObject(targetClass, objectLookup); } - public List<PainlessMethod> lookupPainlessSubClassesMethod(String targetCanonicalClassName, String methodName, int methodArity) { - Objects.requireNonNull(targetCanonicalClassName); - - Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName); - - if (targetClass == null) { - return null; - } - - return lookupPainlessSubClassesMethod(targetClass, methodName, methodArity); - } - public List<PainlessMethod> lookupPainlessSubClassesMethod(Class<?> targetClass, String methodName, int methodArity) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); @@ -218,18 +206,6 @@ public List<PainlessMethod> lookupPainlessSubClassesMethod(Class<?> targetClass, return subMethods; } - public PainlessField lookupPainlessField(String targetCanonicalClassName, boolean isStatic, String fieldName) { - Objects.requireNonNull(targetCanonicalClassName); - - Class<?> targetClass = canonicalTypeNameToType(targetCanonicalClassName); - - if (targetClass == null) { - return null; - } - - return lookupPainlessField(targetClass, isStatic, fieldName); - } - public PainlessField lookupPainlessField(Class<?> targetClass, boolean isStatic, String fieldName) { Objects.requireNonNull(targetClass); Objects.requireNonNull(fieldName); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 0c1497b541954..d3f14b08c7dc2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -56,18 +56,14 @@ public final class PainlessLookupBuilder { - private static final Map<PainlessConstructor, PainlessConstructor> painlessConstructorCache = new HashMap<>(); - private static final Map<PainlessMethod, PainlessMethod> painlessMethodCache = new HashMap<>(); - private static final Map<PainlessField, PainlessField> painlessFieldCache = new HashMap<>(); - private static final Map<PainlessClassBinding, PainlessClassBinding> painlessClassBindingCache = new HashMap<>(); - private static final Map<PainlessInstanceBinding, PainlessInstanceBinding> painlessInstanceBindingCache = new HashMap<>(); - private static final Map<PainlessMethod, PainlessMethod> painlessFilteredCache = new HashMap<>(); private static final Pattern CLASS_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); - private static final Pattern METHOD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); - private static final Pattern FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); + private static final Pattern METHOD_AND_FIELD_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][_a-zA-Z0-9]*$"); - public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { + public static PainlessLookup buildFromWhitelists( + List<Whitelist> whitelists, + Map<Object, Object> dedup, + Map<PainlessMethod, PainlessMethod> filteredMethodCache + ) { PainlessLookupBuilder painlessLookupBuilder = new PainlessLookupBuilder(); String origin = "internal error"; @@ -92,7 +88,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) {
painlessLookupBuilder.addPainlessConstructor( targetCanonicalClassName, whitelistConstructor.canonicalTypeNameParameters, - whitelistConstructor.painlessAnnotations + whitelistConstructor.painlessAnnotations, + dedup ); } @@ -105,7 +102,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { whitelistMethod.methodName, whitelistMethod.returnCanonicalTypeName, whitelistMethod.canonicalTypeNameParameters, - whitelistMethod.painlessAnnotations + whitelistMethod.painlessAnnotations, + dedup ); } @@ -116,7 +114,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { targetCanonicalClassName, whitelistField.fieldName, whitelistField.canonicalTypeNameParameter, - whitelistField.painlessAnnotations + whitelistField.painlessAnnotations, + dedup ); } } @@ -129,7 +128,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { whitelistStatic.methodName, whitelistStatic.returnCanonicalTypeName, whitelistStatic.canonicalTypeNameParameters, - whitelistStatic.painlessAnnotations + whitelistStatic.painlessAnnotations, + dedup ); } @@ -141,7 +141,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { whitelistClassBinding.methodName, whitelistClassBinding.returnCanonicalTypeName, whitelistClassBinding.canonicalTypeNameParameters, - whitelistClassBinding.painlessAnnotations + whitelistClassBinding.painlessAnnotations, + dedup ); } @@ -152,7 +153,8 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { whitelistInstanceBinding.methodName, whitelistInstanceBinding.returnCanonicalTypeName, whitelistInstanceBinding.canonicalTypeNameParameters, - whitelistInstanceBinding.painlessAnnotations + whitelistInstanceBinding.painlessAnnotations, + dedup ); } } @@ -160,7 +162,7 @@ public static PainlessLookup buildFromWhitelists(List<Whitelist> whitelists) { throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception); } - return painlessLookupBuilder.build(); + return painlessLookupBuilder.build(dedup, filteredMethodCache); } // javaClassNamesToClasses is all the classes that need to be available to the custom classloader @@ -269,7 +271,7 @@ private static IllegalArgumentException lookupException(Throwable cause, String return new IllegalArgumentException(Strings.format(formatText, args), cause); } - public void addPainlessClass(Class<?> clazz, Map<Class<?>, Object> annotations) { + private void addPainlessClass(Class<?> clazz, Map<Class<?>, Object> annotations) { Objects.requireNonNull(clazz); Objects.requireNonNull(annotations); @@ -355,10 +357,11 @@ public void addPainlessClass(Class<?> clazz, Map<Class<?>, Object> annotations) } } - public void addPainlessConstructor( + private void addPainlessConstructor( String targetCanonicalClassName, List<String> canonicalTypeNameParameters, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(targetCanonicalClassName); Objects.requireNonNull(canonicalTypeNameParameters); @@ -391,10 +394,15 @@ public void addPainlessConstructor( typeParameters.add(typeParameter); } - addPainlessConstructor(targetClass, typeParameters, annotations); + addPainlessConstructor(targetClass, typeParameters, annotations, dedup); } - public void addPainlessConstructor(Class<?> targetClass, List<Class<?>> typeParameters, Map<Class<?>, Object> annotations) { + private void addPainlessConstructor( + Class<?> targetClass, + List<Class<?>> typeParameters, + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup + ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(typeParameters); @@ -473,7 +481,7 @@ public void addPainlessConstructor(Class<?> targetClass,
List<Class<?>> typePara ); if (existingPainlessConstructor == null) { - newPainlessConstructor = painlessConstructorCache.computeIfAbsent(newPainlessConstructor, Function.identity()); + newPainlessConstructor = (PainlessConstructor) dedup.computeIfAbsent(newPainlessConstructor, Function.identity()); painlessClassBuilder.constructors.put(painlessConstructorKey.intern(), newPainlessConstructor); } else if (newPainlessConstructor.equals(existingPainlessConstructor) == false) { throw lookupException( @@ -486,14 +494,15 @@ public void addPainlessConstructor(Class<?> targetClass, List<Class<?>> typePara } } - public void addPainlessMethod( + private void addPainlessMethod( ClassLoader classLoader, String targetCanonicalClassName, String augmentedCanonicalClassName, String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(classLoader); @@ -561,7 +570,7 @@ public void addPainlessMethod( ); } - addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters, annotations); + addPainlessMethod(targetClass, augmentedClass, methodName, returnType, typeParameters, annotations, dedup); } public void addPainlessMethod( @@ -570,7 +579,8 @@ public void addPainlessMethod( String methodName, Class<?> returnType, List<Class<?>> typeParameters, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(targetClass); @@ -585,7 +595,7 @@ public void addPainlessMethod( String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); - if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + if (METHOD_AND_FIELD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( "invalid method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]." ); @@ -748,7 +758,7 @@ public void addPainlessMethod( ); if (existingPainlessMethod == null) { - newPainlessMethod = painlessMethodCache.computeIfAbsent(newPainlessMethod, key -> key); + newPainlessMethod = (PainlessMethod) dedup.computeIfAbsent(newPainlessMethod, Function.identity()); if (isStatic) { painlessClassBuilder.staticMethods.put(painlessMethodKey.intern(), newPainlessMethod); @@ -771,12 +781,13 @@ public void addPainlessMethod( } } - public void addPainlessField( + private void addPainlessField( ClassLoader classLoader, String targetCanonicalClassName, String fieldName, String canonicalTypeNameParameter, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(classLoader); @@ -827,15 +838,16 @@ public void addPainlessField( ); } - addPainlessField(targetClass, augmentedClass, fieldName, typeParameter, annotations); + addPainlessField(targetClass, augmentedClass, fieldName, typeParameter, annotations, dedup); } - public void addPainlessField( + private void addPainlessField( Class<?> targetClass, Class<?> augmentedClass, String fieldName, Class<?> typeParameter, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(targetClass); @@ -849,7 +861,7 @@ public void addPainlessField( String targetCanonicalClassName = typeToCanonicalTypeName(targetClass); - if (FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) { + if (METHOD_AND_FIELD_NAME_PATTERN.matcher(fieldName).matches() == false) { throw new IllegalArgumentException( "invalid field name [" + fieldName + "] for target class [" + targetCanonicalClassName + "]."
); @@ -946,7 +958,7 @@ public void addPainlessField( PainlessField newPainlessField = new PainlessField(javaField, typeParameter, annotations, methodHandleGetter, null); if (existingPainlessField == null) { - newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, Function.identity()); + newPainlessField = (PainlessField) dedup.computeIfAbsent(newPainlessField, Function.identity()); painlessClassBuilder.staticFields.put(painlessFieldKey.intern(), newPainlessField); } else if (newPainlessField.equals(existingPainlessField) == false) { throw lookupException( @@ -981,7 +993,7 @@ public void addPainlessField( ); if (existingPainlessField == null) { - newPainlessField = painlessFieldCache.computeIfAbsent(newPainlessField, key -> key); + newPainlessField = (PainlessField) dedup.computeIfAbsent(newPainlessField, Function.identity()); painlessClassBuilder.fields.put(painlessFieldKey.intern(), newPainlessField); } else if (newPainlessField.equals(existingPainlessField) == false) { throw lookupException( @@ -1004,7 +1016,8 @@ public void addImportedPainlessMethod( String methodName, String returnCanonicalTypeName, List<String> canonicalTypeNameParameters, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(classLoader); @@ -1046,7 +1059,7 @@ public void addImportedPainlessMethod( ); } - addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters, annotations); + addImportedPainlessMethod(targetClass, methodName, returnType, typeParameters, annotations, dedup); } public void addImportedPainlessMethod( @@ -1054,7 +1067,8 @@ public void addImportedPainlessMethod( String methodName, Class<?> returnType, List<Class<?>> typeParameters, - Map<Class<?>, Object> annotations + Map<Class<?>, Object> annotations, + Map<Object, Object> dedup ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); @@ -1077,7 +1091,7 @@ public void addImportedPainlessMethod( ); } - if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + if (METHOD_AND_FIELD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( "invalid imported method name [" + methodName + "] for target class [" + targetCanonicalClassName + "]."
); @@ -1182,7 +1196,7 @@ public void addImportedPainlessMethod( ); if (existingImportedPainlessMethod == null) { - newImportedPainlessMethod = painlessMethodCache.computeIfAbsent(newImportedPainlessMethod, key -> key); + newImportedPainlessMethod = (PainlessMethod) dedup.computeIfAbsent(newImportedPainlessMethod, Function.identity()); painlessMethodKeysToImportedPainlessMethods.put(painlessMethodKey.intern(), newImportedPainlessMethod); } else if (newImportedPainlessMethod.equals(existingImportedPainlessMethod) == false) { throw lookupException( @@ -1206,7 +1220,8 @@ public void addPainlessClassBinding( String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - Map, Object> annotations + Map, Object> annotations, + Map dedup ) { Objects.requireNonNull(classLoader); @@ -1247,15 +1262,16 @@ public void addPainlessClassBinding( ); } - addPainlessClassBinding(targetClass, methodName, returnType, typeParameters, annotations); + addPainlessClassBinding(targetClass, methodName, returnType, typeParameters, annotations, dedup); } - public void addPainlessClassBinding( + private void addPainlessClassBinding( Class targetClass, String methodName, Class returnType, List> typeParameters, - Map, Object> annotations + Map, Object> annotations, + Map dedup ) { Objects.requireNonNull(targetClass); Objects.requireNonNull(methodName); @@ -1333,7 +1349,7 @@ public void addPainlessClassBinding( } } - if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + if (METHOD_AND_FIELD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( "invalid method name [" + methodName + "] for class binding [" + targetCanonicalClassName + "]." ); @@ -1446,7 +1462,7 @@ public void addPainlessClassBinding( ); if (existingPainlessClassBinding == null) { - newPainlessClassBinding = painlessClassBindingCache.computeIfAbsent(newPainlessClassBinding, Function.identity()); + newPainlessClassBinding = (PainlessClassBinding) dedup.computeIfAbsent(newPainlessClassBinding, Function.identity()); painlessMethodKeysToPainlessClassBindings.put(painlessMethodKey.intern(), newPainlessClassBinding); } else if (newPainlessClassBinding.equals(existingPainlessClassBinding) == false) { throw lookupException( @@ -1469,7 +1485,8 @@ public void addPainlessInstanceBinding( String methodName, String returnCanonicalTypeName, List canonicalTypeNameParameters, - Map, Object> painlessAnnotations + Map, Object> painlessAnnotations, + Map dedup ) { Objects.requireNonNull(targetInstance); @@ -1509,7 +1526,7 @@ public void addPainlessInstanceBinding( ); } - addPainlessInstanceBinding(targetInstance, methodName, returnType, typeParameters, painlessAnnotations); + addPainlessInstanceBinding(targetInstance, methodName, returnType, typeParameters, painlessAnnotations, dedup); } public void addPainlessInstanceBinding( @@ -1517,7 +1534,8 @@ public void addPainlessInstanceBinding( String methodName, Class returnType, List> typeParameters, - Map, Object> painlessAnnotations + Map, Object> painlessAnnotations, + Map dedup ) { Objects.requireNonNull(targetInstance); Objects.requireNonNull(methodName); @@ -1542,7 +1560,7 @@ public void addPainlessInstanceBinding( ); } - if (METHOD_NAME_PATTERN.matcher(methodName).matches() == false) { + if (METHOD_AND_FIELD_NAME_PATTERN.matcher(methodName).matches() == false) { throw new IllegalArgumentException( "invalid method name [" + methodName + "] for instance binding [" + targetCanonicalClassName + "]." 
); @@ -1629,7 +1647,7 @@ public void addPainlessInstanceBinding( ); if (existingPainlessInstanceBinding == null) { - newPainlessInstanceBinding = painlessInstanceBindingCache.computeIfAbsent(newPainlessInstanceBinding, key -> key); + newPainlessInstanceBinding = (PainlessInstanceBinding) dedup.computeIfAbsent(newPainlessInstanceBinding, Function.identity()); painlessMethodKeysToPainlessInstanceBindings.put(painlessMethodKey.intern(), newPainlessInstanceBinding); } else if (newPainlessInstanceBinding.equals(existingPainlessInstanceBinding) == false) { throw lookupException( @@ -1649,16 +1667,19 @@ public void addPainlessInstanceBinding( } } - public PainlessLookup build() { + public PainlessLookup build(Map dedup, Map filteredMethodCache) { buildPainlessClassHierarchy(); setFunctionalInterfaceMethods(); - generateRuntimeMethods(); + generateRuntimeMethods(filteredMethodCache); cacheRuntimeHandles(); Map, PainlessClass> classesToPainlessClasses = Maps.newMapWithExpectedSize(classesToPainlessClassBuilders.size()); for (Map.Entry, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) { - classesToPainlessClasses.put(painlessClassBuilderEntry.getKey(), painlessClassBuilderEntry.getValue().build()); + classesToPainlessClasses.put( + painlessClassBuilderEntry.getKey(), + (PainlessClass) dedup.computeIfAbsent(painlessClassBuilderEntry.getValue().build(), Function.identity()) + ); } if (javaClassNamesToClasses.values().containsAll(canonicalClassNamesToClasses.values()) == false) { @@ -1817,7 +1838,7 @@ private void setFunctionalInterfaceMethod(Class targetClass, PainlessClassBui * {@link Map}. The {@link PainlessClass#runtimeMethods} {@link Map} is used exclusively to look up methods at * run-time resulting from calls with a def type value target. 
*/ - private void generateRuntimeMethods() { + private void generateRuntimeMethods(Map filteredMethodCache) { for (Map.Entry, PainlessClassBuilder> painlessClassBuilderEntry : classesToPainlessClassBuilders.entrySet()) { Class targetClass = painlessClassBuilderEntry.getKey(); PainlessClassBuilder painlessClassBuilder = painlessClassBuilderEntry.getValue(); @@ -1832,7 +1853,7 @@ private void generateRuntimeMethods() { || typeParameter == Long.class || typeParameter == Float.class || typeParameter == Double.class) { - generateFilteredMethod(targetClass, painlessClassBuilder, painlessMethod); + generateFilteredMethod(targetClass, painlessClassBuilder, painlessMethod, filteredMethodCache); } } } @@ -1842,10 +1863,11 @@ private void generateRuntimeMethods() { private static void generateFilteredMethod( Class targetClass, PainlessClassBuilder painlessClassBuilder, - PainlessMethod painlessMethod + PainlessMethod painlessMethod, + Map filteredMethodCache ) { String painlessMethodKey = buildPainlessMethodKey(painlessMethod.javaMethod().getName(), painlessMethod.typeParameters().size()); - PainlessMethod filteredPainlessMethod = painlessFilteredCache.get(painlessMethod); + PainlessMethod filteredPainlessMethod = filteredMethodCache.get(painlessMethod); if (filteredPainlessMethod == null) { Method javaMethod = painlessMethod.javaMethod(); @@ -1899,7 +1921,7 @@ private static void generateFilteredMethod( Map.of() ); painlessClassBuilder.runtimeMethods.put(painlessMethodKey.intern(), filteredPainlessMethod); - painlessFilteredCache.put(painlessMethod, filteredPainlessMethod); + filteredMethodCache.put(painlessMethod, filteredPainlessMethod); } catch (Exception exception) { throw new IllegalStateException( "internal error occurred attempting to generate a runtime method [" + painlessMethodKey + "]", diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java index f9d87f5ce46b8..2ccc70685e6d7 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AliasTests.java @@ -34,7 +34,9 @@ public void testNoShadowing() { IllegalArgumentException err = expectThrows( IllegalArgumentException.class, () -> PainlessLookupBuilder.buildFromWhitelists( - List.of(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias-shadow")) + List.of(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.alias-shadow")), + new HashMap<>(), + new HashMap<>() ) ); assertEquals( diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java index 48da785e801d3..7b0b3b500d12c 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.script.ScriptException; import java.io.IOException; +import java.util.HashMap; import java.util.Map; import static java.util.Collections.singletonList; @@ -26,7 +27,11 @@ import static org.hamcrest.Matchers.not; public class DebugTests extends ScriptTestCase { - private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(PainlessPlugin.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = 
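With the statics gone, build() above now threads both maps through explicitly, and generateFilteredMethod consults the filteredMethodCache with a plain get, generating and storing the filtered variant only on a miss. A standalone sketch of that memoization shape, with strings standing in for PainlessMethod and the MethodHandle rewriting:

    import java.util.HashMap;
    import java.util.Map;

    public class FilteredCacheSketch {
        static String generateFiltered(String method) {
            return method + "#filtered"; // stands in for the boxed-type argument rewriting
        }

        static String getOrGenerate(Map<String, String> filteredMethodCache, String method) {
            String filtered = filteredMethodCache.get(method);
            if (filtered == null) {
                filtered = generateFiltered(method);
                filteredMethodCache.put(method, filtered); // same get/put shape as the diff
            }
            return filtered;
        }

        public static void main(String[] args) {
            Map<String, String> cache = new HashMap<>();
            System.out.println(getOrGenerate(cache, "Comparable.compareTo/1"));
            System.out.println(getOrGenerate(cache, "Comparable.compareTo/1")); // second call is a cache hit
        }
    }

This is why every buildFromWhitelists call site in the tests below now passes two fresh HashMaps: each build gets private caches instead of sharing mutable global state.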
PainlessLookupBuilder.buildFromWhitelists( + PainlessPlugin.BASE_WHITELISTS, + new HashMap<>(), + new HashMap<>() + ); public void testExplain() { // Debug.explain can explain an object diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index 04577d8ca9d81..b44be595b4178 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -19,6 +19,7 @@ import java.io.PrintWriter; import java.io.StringWriter; +import java.util.HashMap; import java.util.List; /** quick and dirty tools for debugging */ @@ -35,12 +36,8 @@ static String toString(Class iface, String source, CompilerSettings settings, PrintWriter outputWriter = new PrintWriter(output); Textifier textifier = new Textifier(); try { - new Compiler(iface, null, null, PainlessLookupBuilder.buildFromWhitelists(whitelists)).compile( - "", - source, - settings, - textifier - ); + new Compiler(iface, null, null, PainlessLookupBuilder.buildFromWhitelists(whitelists, new HashMap<>(), new HashMap<>())) + .compile("", source, settings, textifier); } catch (RuntimeException e) { textifier.print(outputWriter); e.addSuppressed(new Exception("current bytecode: \n" + output)); @@ -65,15 +62,8 @@ private static String tree( PrintWriter outputWriter = new PrintWriter(output); Textifier textifier = new Textifier(); try { - new Compiler(iface, null, null, PainlessLookupBuilder.buildFromWhitelists(whitelists)).compile( - "", - source, - settings, - textifier, - semanticPhaseVisitor, - irPhaseVisitor, - asmPhaseVisitor - ); + new Compiler(iface, null, null, PainlessLookupBuilder.buildFromWhitelists(whitelists, new HashMap<>(), new HashMap<>())) + .compile("", source, settings, textifier, semanticPhaseVisitor, irPhaseVisitor, asmPhaseVisitor); } catch (RuntimeException e) { textifier.print(outputWriter); e.addSuppressed(new Exception("current bytecode: \n" + output)); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index fc42caa364588..ed7fef33302bb 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -22,7 +22,11 @@ import java.util.HashMap; public class DefBootstrapTests extends ESTestCase { - private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists(PainlessPlugin.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = PainlessLookupBuilder.buildFromWhitelists( + PainlessPlugin.BASE_WHITELISTS, + new HashMap<>(), + new HashMap<>() + ); /** calls toString() on integers, twice */ public void testOneType() throws Throwable { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java index c04dc4cd2f893..6a403c0692540 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/LookupTests.java @@ -16,6 +16,7 @@ import org.junit.Before; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Set; @@ -26,7 +27,9 @@ public class LookupTests extends ESTestCase { @Before 
public void setup() { painlessLookup = PainlessLookupBuilder.buildFromWhitelists( - Collections.singletonList(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.lookup")) + Collections.singletonList(WhitelistLoader.loadFromResourceFiles(PainlessPlugin.class, "org.elasticsearch.painless.lookup")), + new HashMap<>(), + new HashMap<>() ); } diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java index 5ef5eb6c0b5b8..4fa1d7b7a3108 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/GeoJsonShapeParserTests.java @@ -181,34 +181,37 @@ public void testParseMultiDimensionShapes() throws IOException { .endArray() .endObject(); - XContentParser parser = createParser(pointGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); - assertNull(parser.nextToken()); + XContentBuilder lineGeoJson; + try (XContentParser parser = createParser(pointGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); - // multi dimension linestring - XContentBuilder lineGeoJson = XContentFactory.jsonBuilder() - .startObject() - .field("type", "LineString") - .startArray("coordinates") - .startArray() - .value(100.0) - .value(0.0) - .value(15.0) - .endArray() - .startArray() - .value(101.0) - .value(1.0) - .value(18.0) - .value(19.0) - .endArray() - .endArray() - .endObject(); + // multi dimension linestring + lineGeoJson = XContentFactory.jsonBuilder() + .startObject() + .field("type", "LineString") + .startArray("coordinates") + .startArray() + .value(100.0) + .value(0.0) + .value(15.0) + .endArray() + .startArray() + .value(101.0) + .value(1.0) + .value(18.0) + .value(19.0) + .endArray() + .endArray() + .endObject(); + } - parser = createParser(lineGeoJson); - parser.nextToken(); - ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); - assertNull(parser.nextToken()); + try (var parser = createParser(lineGeoJson)) { + parser.nextToken(); + ElasticsearchGeoAssertions.assertValidException(parser, ElasticsearchParseException.class); + assertNull(parser.nextToken()); + } } @Override diff --git a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java index 6ad4d2c06c6d4..4e06a37ec7f20 100644 --- a/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java +++ b/modules/legacy-geo/src/test/java/org/elasticsearch/legacygeo/search/LegacyGeoShapeQueryTests.java @@ -99,7 +99,7 @@ public void testPointsOnlyExplicit() throws Exception { .get(); // test that point was inserted - assertHitCount(client().prepareSearch("geo_points_only").setQuery(matchAllQuery()).get(), 2L); + assertHitCount(client().prepareSearch("geo_points_only").setQuery(matchAllQuery()), 2L); } public void testPointsOnly() throws Exception { diff --git a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java index 
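The GeoJSON test rewrite above is the standard try-with-resources pattern: each XContentParser is scoped to a block so it is closed even when an assertion throws. A JDK-only sketch of the same shape, with BufferedReader standing in for the parser:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.StringReader;

    public class ParserScopeSketch {
        public static void main(String[] args) throws IOException {
            try (BufferedReader parser = new BufferedReader(new StringReader("{}"))) {
                System.out.println(parser.readLine()); // use the parser only inside the block
            } // parser.close() runs here, on success or failure
        }
    }

The Reindexer hunk further on applies the same scoping when converting a document between XContent types.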
0430fe3404f91..c387ff2b2134b 100644 --- a/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java +++ b/modules/mapper-extras/src/internalClusterTest/java/org/elasticsearch/index/mapper/MatchOnlyTextMapperIT.java @@ -55,7 +55,7 @@ public void testHighlightingWithMatchOnlyTextFieldMatchPhrase() throws IOExcepti .startObject() .field( "message", - "[.ds-.slm-history-5-2023.09.20-" + "[.ds-.slm-history-7-2023.09.20-" + randomInt() + "][0] marking and sending shard failed due to [failed recovery]" ) @@ -104,7 +104,7 @@ public void testHighlightingWithMatchOnlyTextFieldSyntheticSource() throws IOExc .startObject() .field( "message", - "[.ds-.slm-history-5-2023.09.20-" + "[.ds-.slm-history-7-2023.09.20-" + randomInt() + "][0] marking and sending shard failed due to [failed recovery]" ) diff --git a/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java b/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java index 4c3206e82b8d6..dbe1968bb076a 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java +++ b/modules/reindex/src/main/java/org/elasticsearch/reindex/Reindexer.java @@ -36,6 +36,7 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.VersionType; @@ -60,7 +61,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.io.InputStream; import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; @@ -321,8 +321,11 @@ protected RequestWrapper buildRequest(ScrollableHitSource.Hit doc) if (mainRequestXContentType != null && doc.getXContentType() != mainRequestXContentType) { // we need to convert try ( - InputStream stream = doc.getSource().streamInput(); - XContentParser parser = sourceXContentType.xContent().createParser(XContentParserConfiguration.EMPTY, stream); + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY, + doc.getSource(), + sourceXContentType + ); XContentBuilder builder = XContentBuilder.builder(mainRequestXContentType.xContent()) ) { parser.nextToken(); diff --git a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java index 3c5a3eb2e40f9..956d90c5d041c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/AsyncBulkByScrollActionTests.java @@ -66,7 +66,6 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskManager; @@ -574,9 +573,14 @@ protected RequestWrapper buildRequest(Hit doc) { new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 ); - InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); SearchResponse searchResponse = new SearchResponse( - internalResponse, + hits, + null, + null, + false, + false, + null, + 1, scrollId(), 5, 4, diff 
--git a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java index 0ad1867e75058..7ac50eb0e7c6c 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/reindex/ClientScrollableHitSourceTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.index.reindex.ScrollableHitSource; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; @@ -127,6 +126,7 @@ private void dotestBasicsWithRetry(int retries, int minFailures, int maxFailures ++expectedSearchRetries; } + searchResponse.decRef(); searchResponse = createSearchResponse(); client.respond(TransportSearchScrollAction.TYPE, searchResponse); } @@ -168,9 +168,14 @@ private SearchResponse createSearchResponse() { new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0 ); - InternalSearchResponse internalResponse = new InternalSearchResponse(hits, null, null, null, false, false, 1); return new SearchResponse( - internalResponse, + hits, + null, + null, + false, + false, + null, + 1, randomSimpleString(random(), 1, 10), 5, 4, diff --git a/modules/repository-url/build.gradle b/modules/repository-url/build.gradle index 2850aee68a2fb..3537d430e212b 100644 --- a/modules/repository-url/build.gradle +++ b/modules/repository-url/build.gradle @@ -44,14 +44,3 @@ tasks.named("thirdPartyAudit").configure { ) } -//File repositoryDir = fixture.fsRepositoryDir as File - -testClusters.configureEach { - // repositoryDir is used by a FS repository to create snapshots - setting 'path.repo', "${repositoryDir.absolutePath}", PropertyNormalization.IGNORE_VALUE - // repositoryDir is used by two URL repositories to restore snapshots - setting 'repositories.url.allowed_urls', { - "http://snapshot.test*,${fixtureAddress('url-fixture')}" - }, PropertyNormalization.IGNORE_VALUE -} - diff --git a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java index 61ef5f1973854..3c869a89cfaa9 100644 --- a/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java +++ b/modules/transport-netty4/src/internalClusterTest/java/org/elasticsearch/transport/netty4/ESLoggingHandlerIT.java @@ -11,6 +11,8 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.elasticsearch.ESNetty4IntegTestCase; +import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; +import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.MockLogAppender; @@ -19,6 +21,7 @@ import org.elasticsearch.transport.TransportLogger; import java.io.IOException; +import java.util.concurrent.TimeUnit; @ESIntegTestCase.ClusterScope(numDataNodes = 2, scope = ESIntegTestCase.Scope.TEST) public class ESLoggingHandlerIT extends ESNetty4IntegTestCase { @@ -84,7 +87,7 @@ public void testLoggingHandler() { 
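Alongside dropping the InternalSearchResponse wrapper, the ClientScrollableHitSourceTests change above adds a decRef() before the old response reference is overwritten, since SearchResponse instances are now reference-counted. A toy sketch of that discipline; SimpleRefCounted is illustrative, not Elasticsearch's RefCounted:

    public class RefCountSketch {
        static class SimpleRefCounted {
            private int refs = 1;
            void decRef() {
                if (--refs == 0) {
                    System.out.println("released"); // buffers would be freed here
                }
            }
        }

        public static void main(String[] args) {
            SimpleRefCounted response = new SimpleRefCounted();
            response.decRef();                 // release the response you own first...
            response = new SimpleRefCounted(); // ...then swap in the replacement
            response.decRef();
        }
    }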
appender.addExpectation(writeExpectation); appender.addExpectation(flushExpectation); appender.addExpectation(readExpectation); - clusterAdmin().prepareNodesHotThreads().get(); + client().execute(TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()).actionGet(10, TimeUnit.SECONDS); appender.assertAllExpectationsMatched(); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java index d662003530c22..65276c04bed56 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/plugin/analysis/kuromoji/KuromojiTokenizerFactory.java @@ -12,7 +12,6 @@ import org.apache.lucene.analysis.ja.JapaneseTokenizer; import org.apache.lucene.analysis.ja.JapaneseTokenizer.Mode; import org.apache.lucene.analysis.ja.dict.UserDictionary; -import org.apache.lucene.analysis.util.CSVUtil; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; @@ -23,10 +22,8 @@ import java.io.IOException; import java.io.Reader; import java.io.StringReader; -import java.util.HashSet; import java.util.List; import java.util.Locale; -import java.util.Set; public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { @@ -60,11 +57,10 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting "It is not allowed to use [" + USER_DICT_PATH_OPTION + "] in conjunction" + " with [" + USER_DICT_RULES_OPTION + "]" ); } - List ruleList = Analysis.getWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, false); + List ruleList = Analysis.getWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, false, true); if (ruleList == null || ruleList.isEmpty()) { return null; } - validateDuplicatedWords(ruleList); StringBuilder sb = new StringBuilder(); for (String line : ruleList) { sb.append(line).append(System.lineSeparator()); @@ -76,23 +72,6 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting } } - private static void validateDuplicatedWords(List ruleList) { - Set dup = new HashSet<>(); - int lineNum = 0; - for (String line : ruleList) { - // ignore comments - if (line.startsWith("#") == false) { - String[] values = CSVUtil.parse(line); - if (dup.add(values[0]) == false) { - throw new IllegalArgumentException( - "Found duplicate term [" + values[0] + "] in user dictionary " + "at line [" + lineNum + "]" - ); - } - } - ++lineNum; - } - } - public static JapaneseTokenizer.Mode getMode(Settings settings) { String modeSetting = settings.get("mode", JapaneseTokenizer.DEFAULT_MODE.name()); return JapaneseTokenizer.Mode.valueOf(modeSetting.toUpperCase(Locale.ENGLISH)); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java index f2949e45964a4..65c9bb9833177 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriAnalyzerProvider.java @@ -30,7 +30,7 @@ public class NoriAnalyzerProvider extends AbstractIndexAnalyzerProvider tagList = 
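The validateDuplicatedWords body deleted above moves behind the new boolean argument to Analysis.getWordList. A standalone sketch preserving the same logic from the removed code — skip comment lines, key on the first CSV column, report the offending line number — with a naive split standing in for Lucene's CSVUtil.parse:

    import java.util.HashSet;
    import java.util.List;
    import java.util.Set;

    public class DuplicateRuleCheck {
        static void validate(List<String> ruleList) {
            Set<String> dup = new HashSet<>();
            int lineNum = 0;
            for (String line : ruleList) {
                if (line.startsWith("#") == false) { // ignore comments
                    String term = line.split(",")[0];
                    if (dup.add(term) == false) {
                        throw new IllegalArgumentException(
                            "Found duplicate term [" + term + "] in user dictionary at line [" + lineNum + "]"
                        );
                    }
                }
                ++lineNum;
            }
        }

        public static void main(String[] args) {
            try {
                validate(List.of("c++", "C쁠쁠", "세종", "세종"));
            } catch (IllegalArgumentException e) {
                System.out.println(e.getMessage()); // duplicate [세종] at line [3]
            }
        }
    }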
Analysis.getWordList(env, settings, "stoptags"); final Set stopTags = tagList != null ? resolvePOSList(tagList) : KoreanPartOfSpeechStopFilter.DEFAULT_STOP_TAGS; analyzer = new KoreanAnalyzer(userDictionary, mode, stopTags, false); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java index c0be8322ade95..eedb4c2011af3 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/plugin/analysis/nori/NoriTokenizerFactory.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AbstractTokenizerFactory; import org.elasticsearch.index.analysis.Analysis; @@ -24,6 +25,8 @@ import java.util.List; import java.util.Locale; +import static org.elasticsearch.index.IndexVersions.UPGRADE_LUCENE_9_9_1; + public class NoriTokenizerFactory extends AbstractTokenizerFactory { private static final String USER_DICT_PATH_OPTION = "user_dictionary"; private static final String USER_DICT_RULES_OPTION = "user_dictionary_rules"; @@ -35,17 +38,24 @@ public class NoriTokenizerFactory extends AbstractTokenizerFactory { public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, settings, name); decompoundMode = getMode(settings); - userDictionary = getUserDictionary(env, settings); + userDictionary = getUserDictionary(env, settings, indexSettings); discardPunctuation = settings.getAsBoolean("discard_punctuation", true); } - public static UserDictionary getUserDictionary(Environment env, Settings settings) { + public static UserDictionary getUserDictionary(Environment env, Settings settings, IndexSettings indexSettings) { if (settings.get(USER_DICT_PATH_OPTION) != null && settings.get(USER_DICT_RULES_OPTION) != null) { throw new IllegalArgumentException( "It is not allowed to use [" + USER_DICT_PATH_OPTION + "] in conjunction" + " with [" + USER_DICT_RULES_OPTION + "]" ); } - List ruleList = Analysis.getWordList(env, settings, USER_DICT_PATH_OPTION, USER_DICT_RULES_OPTION, true); + List ruleList = Analysis.getWordList( + env, + settings, + USER_DICT_PATH_OPTION, + USER_DICT_RULES_OPTION, + true, + isSupportDuplicateCheck(indexSettings) + ); if (ruleList == null || ruleList.isEmpty()) { return null; } @@ -60,6 +70,24 @@ public static UserDictionary getUserDictionary(Environment env, Settings setting } } + /** + * Determines if the specified index version supports duplicate checks. + * This method checks if the version of the index where it was created + * is at Version 8.13.0 or above. + * The feature of duplicate checks is introduced starting + * from version 8.13.0, hence any versions earlier than this do not support duplicate checks. + * + * @param indexSettings The settings of the index in question. + * @return Returns true if the version is 8.13.0 or later which means + * that the duplicate check feature is supported. + */ + private static boolean isSupportDuplicateCheck(IndexSettings indexSettings) { + var idxVersion = indexSettings.getIndexVersionCreated(); + // Explicitly exclude the range of versions greater than NORI_DUPLICATES, that + // are also in 8.12. 
The only version in this range is UPGRADE_LUCENE_9_9_1. + return idxVersion.onOrAfter(IndexVersions.NORI_DUPLICATES) && idxVersion != UPGRADE_LUCENE_9_9_1; + } + public static KoreanTokenizer.DecompoundMode getMode(Settings settings) { String modeSetting = settings.get("decompound_mode", KoreanTokenizer.DEFAULT_DECOMPOUND.name()); return KoreanTokenizer.DecompoundMode.valueOf(modeSetting.toUpperCase(Locale.ENGLISH)); diff --git a/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java b/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java index e091813184472..642ed19c520d7 100644 --- a/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java +++ b/plugins/analysis-nori/src/test/java/org/elasticsearch/plugin/analysis/nori/NoriAnalysisTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import org.elasticsearch.index.analysis.AnalysisTestsHelper; import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; @@ -117,6 +118,31 @@ public void testNoriAnalyzerInvalidUserDictOption() throws Exception { ); } + public void testNoriAnalyzerDuplicateUserDictRule() throws Exception { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.my_analyzer.type", "nori") + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.NORI_DUPLICATES) + .putList("index.analysis.analyzer.my_analyzer.user_dictionary_rules", "c++", "C쁠쁠", "세종", "세종", "세종시 세종 시") + .build(); + + final IllegalArgumentException exc = expectThrows(IllegalArgumentException.class, () -> createTestAnalysis(settings)); + assertThat(exc.getMessage(), containsString("[세종] in user dictionary at line [3]")); + } + + public void testNoriAnalyzerDuplicateUserDictRuleWithLegacyVersion() throws IOException { + Settings settings = Settings.builder() + .put("index.analysis.analyzer.my_analyzer.type", "nori") + .put(IndexMetadata.SETTING_VERSION_CREATED, IndexVersions.V_8_10_0) + .putList("index.analysis.analyzer.my_analyzer.user_dictionary_rules", "c++", "C쁠쁠", "세종", "세종", "세종시 세종 시") + .build(); + + final TestAnalysis analysis = createTestAnalysis(settings); + Analyzer analyzer = analysis.indexAnalyzers.get("my_analyzer"); + try (TokenStream stream = analyzer.tokenStream("", "세종")) { + assertTokenStreamContents(stream, new String[] { "세종" }); + } + } + public void testNoriTokenizer() throws Exception { Settings settings = Settings.builder() .put("index.analysis.tokenizer.my_tokenizer.type", "nori_tokenizer") diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java index b818de468ea2c..7ac817d386daf 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/CcsCommonYamlTestSuiteIT.java @@ -25,7 +25,6 @@ import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.Strings; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.Tuple; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import 
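isSupportDuplicateCheck above gates the new duplicate check on the index-creation version, carving out UPGRADE_LUCENE_9_9_1 because that id also shipped in 8.12. A standalone sketch of the gate; the integer ids below are invented for illustration and are not the real IndexVersions constants:

    public class VersionGateSketch {
        static final int NORI_DUPLICATES = 8_500_000;      // hypothetical id introducing the check
        static final int UPGRADE_LUCENE_9_9_1 = 8_500_010; // hypothetical later id released in 8.12

        static boolean supportsDuplicateCheck(int indexVersionCreated) {
            return indexVersionCreated >= NORI_DUPLICATES && indexVersionCreated != UPGRADE_LUCENE_9_9_1;
        }

        public static void main(String[] args) {
            System.out.println(supportsDuplicateCheck(8_499_999)); // false: created before the check existed
            System.out.println(supportsDuplicateCheck(8_500_010)); // false: the 8.12 carve-out
            System.out.println(supportsDuplicateCheck(8_500_020)); // true
        }
    }

This is exactly what the two new NoriAnalysisTests cases exercise: a NORI_DUPLICATES index rejects the duplicate rule, while a V_8_10_0 index silently accepts it.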
org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; @@ -159,19 +158,7 @@ public void initSearchClient() throws IOException { searchClient = buildClient(restClientSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); adminSearchClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); - Tuple versionVersionTuple = readVersionsFromCatNodes(adminSearchClient); - final Version esVersion = versionVersionTuple.v1(); - final String os = readOsFromNodesInfo(adminSearchClient); - - searchYamlTestClient = new TestCandidateAwareClient( - getRestSpec(), - searchClient, - hosts, - esVersion, - ESRestTestCase::clusterHasFeature, - os, - this::getClientBuilderWithSniffedHosts - ); + searchYamlTestClient = new TestCandidateAwareClient(getRestSpec(), searchClient, hosts, this::getClientBuilderWithSniffedHosts); // check that we have an established CCS connection Request request = new Request("GET", "_remote/info"); @@ -298,10 +285,22 @@ public static Iterable parameters() throws Exception { @Override protected ClientYamlTestExecutionContext createRestTestExecutionContext( ClientYamlTestCandidate clientYamlTestCandidate, - ClientYamlTestClient clientYamlTestClient + ClientYamlTestClient clientYamlTestClient, + final Version esVersion, + final Predicate clusterFeaturesPredicate, + final String os ) { // depending on the API called, we either return the client running against the "write" or the "search" cluster here - return new ClientYamlTestExecutionContext(clientYamlTestCandidate, clientYamlTestClient, randomizeContentType()) { + + // TODO: reconcile and provide unified features, os, version(s), based on both clientYamlTestClient and searchYamlTestClient + return new ClientYamlTestExecutionContext( + clientYamlTestCandidate, + clientYamlTestClient, + randomizeContentType(), + esVersion, + ESRestTestCase::clusterHasFeature, + os + ) { protected ClientYamlTestClient clientYamlTestClient(String apiName) { if (CCS_APIS.contains(apiName)) { return searchYamlTestClient; @@ -328,12 +327,9 @@ static class TestCandidateAwareClient extends ClientYamlTestClient { ClientYamlSuiteRestSpec restSpec, RestClient restClient, List hosts, - Version esVersion, - Predicate clusterFeaturesPredicate, - String os, CheckedSupplier clientBuilderWithSniffedNodes ) { - super(restSpec, restClient, hosts, esVersion, clusterFeaturesPredicate, os, clientBuilderWithSniffedNodes); + super(restSpec, restClient, hosts, clientBuilderWithSniffedNodes); } public void setTestCandidate(ClientYamlTestCandidate testCandidate) { diff --git a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java index 51d499db61932..05dea0e95445f 100644 --- a/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java +++ b/qa/ccs-common-rest/src/yamlRestTest/java/org/elasticsearch/test/rest/yaml/RcsCcsCommonYamlTestSuiteIT.java @@ -26,7 +26,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.core.Tuple; import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.FeatureFlag; import org.elasticsearch.test.cluster.local.LocalClusterConfigProvider; @@ -46,6 +45,7 @@ import java.util.List; import java.util.Map; import 
java.util.concurrent.atomic.AtomicReference; +import java.util.function.Predicate; import static java.util.Collections.unmodifiableList; import static org.elasticsearch.test.rest.yaml.CcsCommonYamlTestSuiteIT.CCS_APIS; @@ -221,19 +221,7 @@ public void initSearchClient() throws IOException { clusterHosts.toArray(new HttpHost[clusterHosts.size()]) ); - Tuple versionVersionTuple = readVersionsFromCatNodes(adminSearchClient); - final Version esVersion = versionVersionTuple.v1(); - final String os = readOsFromNodesInfo(adminSearchClient); - - searchYamlTestClient = new TestCandidateAwareClient( - getRestSpec(), - searchClient, - hosts, - esVersion, - ESRestTestCase::clusterHasFeature, - os, - this::getClientBuilderWithSniffedHosts - ); + searchYamlTestClient = new TestCandidateAwareClient(getRestSpec(), searchClient, hosts, this::getClientBuilderWithSniffedHosts); configureRemoteCluster(); // check that we have an established CCS connection @@ -282,10 +270,22 @@ public static Iterable parameters() throws Exception { @Override protected ClientYamlTestExecutionContext createRestTestExecutionContext( ClientYamlTestCandidate clientYamlTestCandidate, - ClientYamlTestClient clientYamlTestClient + ClientYamlTestClient clientYamlTestClient, + final Version esVersion, + final Predicate clusterFeaturesPredicate, + final String os ) { // depending on the API called, we either return the client running against the "write" or the "search" cluster here - return new ClientYamlTestExecutionContext(clientYamlTestCandidate, clientYamlTestClient, randomizeContentType()) { + + // TODO: reconcile and provide unified features, os, version(s), based on both clientYamlTestClient and searchYamlTestClient + return new ClientYamlTestExecutionContext( + clientYamlTestCandidate, + clientYamlTestClient, + randomizeContentType(), + esVersion, + ESRestTestCase::clusterHasFeature, + os + ) { protected ClientYamlTestClient clientYamlTestClient(String apiName) { if (CCS_APIS.contains(apiName)) { return searchYamlTestClient; diff --git a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index b17b81b6ac188..45aed866dc086 100644 --- a/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/javaRestTest/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -37,7 +37,6 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.transport.MockTransportService; @@ -92,18 +91,15 @@ private static MockTransportService startTransport( TransportSearchAction.TYPE.name(), EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchRequest::new, - (request, channel, task) -> { - InternalSearchResponse response = new InternalSearchResponse( + (request, channel, task) -> channel.sendResponse( + new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, - null, false, null, - 1 - ); - SearchResponse searchResponse = new SearchResponse( - 
response, + null, + 1, null, 1, 1, @@ -111,9 +107,8 @@ private static MockTransportService startTransport( 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY - ); - channel.sendResponse(searchResponse); - } + ) + ) ); newService.registerRequestHandler( ClusterStateAction.NAME, diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index db2904a53dd11..47f7bb488d83d 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -112,6 +112,7 @@ public class FullClusterRestartIT extends ParameterizedFullClusterRestartTestCas .setting("indices.memory.shard_inactive_time", "60m") .apply(() -> clusterConfig) .feature(FeatureFlag.TIME_SERIES_MODE) + .feature(FeatureFlag.FAILURE_STORE_ENABLED) .build(); @ClassRule diff --git a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java index 3b58cf932fa61..d75519002f92e 100644 --- a/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java +++ b/qa/full-cluster-restart/src/javaRestTest/java/org/elasticsearch/upgrades/QueryBuilderBWCIT.java @@ -52,6 +52,7 @@ import java.util.List; import java.util.Map; +import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; /** @@ -251,9 +252,9 @@ public void testQueryBuilderBWC() throws Exception { StreamInput input = new NamedWriteableAwareStreamInput(new InputStreamStreamInput(in), registry) ) { - @UpdateForV9 // always true + @UpdateForV9 // condition will always be true var originalClusterHasTransportVersion = parseLegacyVersion(getOldClusterVersion()).map( - v -> v.onOrAfter(Version.V_8_8_0) + v -> v.onOrAfter(VERSION_INTRODUCING_TRANSPORT_VERSIONS) ).orElse(true); final TransportVersion transportVersion; diff --git a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/MultiClusterSearchYamlTestSuiteIT.java b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/MultiClusterSearchYamlTestSuiteIT.java index 0f92a19098026..de1c75c4af834 100644 --- a/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/MultiClusterSearchYamlTestSuiteIT.java +++ b/qa/multi-cluster-search/src/test/java/org/elasticsearch/search/MultiClusterSearchYamlTestSuiteIT.java @@ -14,12 +14,15 @@ import org.apache.lucene.tests.util.TimeUnits; import org.elasticsearch.Version; +import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.test.rest.yaml.ClientYamlTestCandidate; import org.elasticsearch.test.rest.yaml.ClientYamlTestClient; import org.elasticsearch.test.rest.yaml.ClientYamlTestExecutionContext; import org.elasticsearch.test.rest.yaml.ESClientYamlSuiteTestCase; import org.junit.BeforeClass; +import java.util.function.Predicate; + @TimeoutSuite(millis = 5 * TimeUnits.MINUTE) // to account for slow as hell VMs public class MultiClusterSearchYamlTestSuiteIT extends ESClientYamlSuiteTestCase { @@ -33,27 +36,31 @@ public static void determineRemoteClusterMinimumVersion() { } } + @Override protected ClientYamlTestExecutionContext createRestTestExecutionContext( ClientYamlTestCandidate 
clientYamlTestCandidate, - ClientYamlTestClient clientYamlTestClient + ClientYamlTestClient clientYamlTestClient, + final Version esVersion, + final Predicate clusterFeaturesPredicate, + final String os ) { - return new ClientYamlTestExecutionContext(clientYamlTestCandidate, clientYamlTestClient, randomizeContentType()) { + /* + * Since the esVersion is used to skip tests in ESClientYamlSuiteTestCase, we also take into account the + * remote cluster version here and return it if it is lower than the local client version. This is used to + * skip tests if some feature isn't available on the remote cluster yet. + */ + final Version commonEsVersion = remoteEsVersion != null && remoteEsVersion.before(esVersion) ? remoteEsVersion : esVersion; + + // TODO: same for os and features - /** - * Since the esVersion is used to skip tests in ESClientYamlSuiteTestCase, we also take into account the - * remote cluster version here and return it if it is lower than the local client version. This is used to - * skip tests if some feature isn't available on the remote cluster yet. - */ - @Override - public Version esVersion() { - Version clientEsVersion = clientYamlTestClient.getEsVersion(); - if (remoteEsVersion == null) { - return clientEsVersion; - } else { - return remoteEsVersion.before(clientEsVersion) ? remoteEsVersion : clientEsVersion; - } - } - }; + return new ClientYamlTestExecutionContext( + clientYamlTestCandidate, + clientYamlTestClient, + randomizeContentType(), + commonEsVersion, + ESRestTestCase::clusterHasFeature, + os + ); } @Override diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java index 724f5c2d51be6..f5a1839001e5c 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/IngestDocumentMustacheIT.java @@ -23,10 +23,13 @@ public void testAccessMetadataViaTemplate() { Map document = new HashMap<>(); document.put("foo", "bar"); IngestDocument ingestDocument = new IngestDocument("index", "id", 1, null, null, document); - ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo}}", scriptService)); + ingestDocument.setFieldValue(ingestDocument.renderTemplate(compile("field1")), ValueSource.wrap("1 {{foo}}", scriptService)); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 bar")); - ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("2 {{_source.foo}}", scriptService)); + ingestDocument.setFieldValue( + ingestDocument.renderTemplate(compile("field1")), + ValueSource.wrap("2 {{_source.foo}}", scriptService) + ); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 bar")); } @@ -38,11 +41,14 @@ public void testAccessMapMetadataViaTemplate() { innerObject.put("qux", Collections.singletonMap("fubar", "hello qux and fubar")); document.put("foo", innerObject); IngestDocument ingestDocument = new IngestDocument("index", "id", 1, null, null, document); - ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", scriptService)); + ingestDocument.setFieldValue( + ingestDocument.renderTemplate(compile("field1")), + ValueSource.wrap("1 {{foo.bar}} {{foo.baz}} {{foo.qux.fubar}}", scriptService) + ); 
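The multi-cluster override above now computes commonEsVersion up front: when the remote cluster is older than the local one, its version wins, so feature-gated tests skip correctly. A JDK-only sketch of that min-version selection with an illustrative Version record:

    public class CommonVersionSketch {
        record Version(int major, int minor) implements Comparable<Version> {
            public int compareTo(Version o) {
                return major != o.major ? Integer.compare(major, o.major) : Integer.compare(minor, o.minor);
            }
            boolean before(Version o) { return compareTo(o) < 0; }
        }

        // Same ternary as the diff: prefer the remote version only when it is older.
        static Version commonEsVersion(Version local, Version remote) {
            return remote != null && remote.before(local) ? remote : local;
        }

        public static void main(String[] args) {
            System.out.println(commonEsVersion(new Version(8, 13), new Version(8, 11))); // remote is older -> 8.11
            System.out.println(commonEsVersion(new Version(8, 13), null));               // no remote -> local
        }
    }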
assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 hello bar hello baz hello qux and fubar")); ingestDocument.setFieldValue( - compile("field1"), + ingestDocument.renderTemplate(compile("field1")), ValueSource.wrap("2 {{_source.foo.bar}} {{_source.foo.baz}} {{_source.foo.qux.fubar}}", scriptService) ); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("2 hello bar hello baz hello qux and fubar")); @@ -58,7 +64,10 @@ public void testAccessListMetadataViaTemplate() { list.add(null); document.put("list2", list); IngestDocument ingestDocument = new IngestDocument("index", "id", 1, null, null, document); - ingestDocument.setFieldValue(compile("field1"), ValueSource.wrap("1 {{list1.0}} {{list2.0}}", scriptService)); + ingestDocument.setFieldValue( + ingestDocument.renderTemplate(compile("field1")), + ValueSource.wrap("1 {{list1.0}} {{list2.0}}", scriptService) + ); assertThat(ingestDocument.getFieldValue("field1", String.class), equalTo("1 foo {field=value}")); } @@ -69,7 +78,7 @@ public void testAccessIngestMetadataViaTemplate() { document.put("_ingest", ingestMap); IngestDocument ingestDocument = new IngestDocument("index", "id", 1, null, null, document); ingestDocument.setFieldValue( - compile("ingest_timestamp"), + ingestDocument.renderTemplate(compile("ingest_timestamp")), ValueSource.wrap("{{_ingest.timestamp}} and {{_source._ingest.timestamp}}", scriptService) ); assertThat( diff --git a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java index c93ef30731960..df4c5827cebc1 100644 --- a/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java +++ b/qa/smoke-test-ingest-with-all-dependencies/src/yamlRestTest/java/org/elasticsearch/ingest/ValueSourceMustacheIT.java @@ -57,9 +57,9 @@ public void testValueSourceWithTemplates() { public void testAccessSourceViaTemplate() { IngestDocument ingestDocument = new IngestDocument("marvel", "id", 1, null, null, new HashMap<>()); assertThat(ingestDocument.hasField("marvel"), is(false)); - ingestDocument.setFieldValue(compile("{{_index}}"), ValueSource.wrap("{{_index}}", scriptService)); + ingestDocument.setFieldValue(ingestDocument.renderTemplate(compile("{{_index}}")), ValueSource.wrap("{{_index}}", scriptService)); assertThat(ingestDocument.getFieldValue("marvel", String.class), equalTo("marvel")); - ingestDocument.removeField(compile("{{marvel}}")); + ingestDocument.removeField(ingestDocument.renderTemplate(compile("{{marvel}}"))); assertThat(ingestDocument.hasField("index"), is(false)); } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json index e95621d30fc16..36535109df8e7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.check_in.json @@ -1,7 +1,7 @@ { "connector.check_in": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/check-in-connector-api.html", "description": "Updates the last_seen timestamp in the connector document." 
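The ingest tests above now call ingestDocument.renderTemplate(...) so that setFieldValue and removeField receive a concrete, already-rendered path rather than a template script. A standalone sketch of that two-step shape; the string substitution below stands in for the Mustache TemplateScript:

    import java.util.HashMap;
    import java.util.Map;

    public class RenderTemplateSketch {
        // Render "{{key}}" placeholders against the source document.
        static String renderTemplate(String template, Map<String, Object> source) {
            String rendered = template;
            for (Map.Entry<String, Object> e : source.entrySet()) {
                rendered = rendered.replace("{{" + e.getKey() + "}}", String.valueOf(e.getValue()));
            }
            return rendered;
        }

        public static void main(String[] args) {
            Map<String, Object> source = new HashMap<>();
            source.put("_index", "marvel");
            String path = renderTemplate("{{_index}}", source); // render first...
            source.put(path, "value");                          // ...then mutate by the concrete path
            System.out.println(source);                         // contains both _index and marvel
        }
    }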
}, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json index dcb3a4f83c287..88c4e85dac2ef 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.delete.json @@ -1,7 +1,7 @@ { "connector.delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-connector-api.html", "description": "Deletes a connector." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json index bcddef8cb5cb9..2645df28c5d1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.get.json @@ -1,7 +1,7 @@ { "connector.get": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-api.html", "description": "Returns the details about a connector." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json index 7bc1504253070..f6d93555b72ed 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.last_sync.json @@ -1,7 +1,7 @@ { "connector.last_sync": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-last-sync-api.html", "description": "Updates the stats of last sync in the connector document." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json index 852a5fbd85998..bc8f12a933b1e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.list.json @@ -1,7 +1,7 @@ { "connector.list": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-connector-api.html", "description": "Lists all connectors." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json index e76124bbecf7d..edc865012876e 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.post.json @@ -1,7 +1,7 @@ { "connector.post": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html", "description": "Creates a connector." 
}, "stability": "experimental", @@ -26,7 +26,7 @@ }, "body": { "description": "The connector configuration.", - "required": false + "required": true } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json index 0ab5c18671040..af733de6aa06c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.put.json @@ -1,7 +1,7 @@ { "connector.put": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-api.html", "description": "Creates or updates a connector." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json index a82f9e0f29225..1ececd7ea95f7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_configuration.json @@ -1,7 +1,7 @@ { "connector.update_configuration": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-configuration-api.html", "description": "Updates the connector configuration." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json index 51d5a1b25973b..150f71ad033ac 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_error.json @@ -1,7 +1,7 @@ { "connector.update_error": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-error-api.html", "description": "Updates the error field in the connector document." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json index b9815fc111c06..c2a9bf0720746 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_filtering.json @@ -1,7 +1,7 @@ { "connector.update_filtering": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-filtering-api.html", "description": "Updates the filtering field in the connector document." 
}, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json index dabac5599932b..a7ca1a9730ab9 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_name.json @@ -1,7 +1,7 @@ { "connector.update_name": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-name-description-api.html", "description": "Updates the name and/or description fields in the connector document." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json index 25687e41a48de..b7ab6abcf088d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_pipeline.json @@ -1,7 +1,7 @@ { "connector.update_pipeline": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-pipeline-api.html", "description": "Updates the pipeline field in the connector document." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json index 8d934b8025145..98cee5c257b90 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector.update_scheduling.json @@ -1,7 +1,7 @@ { "connector.update_scheduling": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/update-connector-scheduling-api.html", "description": "Updates the scheduling field in the connector document." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json index dbea6935f8a87..1e8cf154cf652 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.cancel.json @@ -1,7 +1,7 @@ { "connector_sync_job.cancel": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/cancel-connector-sync-job-api.html", "description": "Cancels a connector sync job." 
}, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json index 8193d92395255..a6c96f506b115 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.check_in.json @@ -1,7 +1,7 @@ { "connector_sync_job.check_in": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/check-in-connector-sync-job-api.html", "description": "Checks in a connector sync job (refreshes 'last_seen')." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json index ba9b5095a5275..11894a48db576 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.delete.json @@ -1,7 +1,7 @@ { "connector_sync_job.delete": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-connector-sync-job-api.html", "description": "Deletes a connector sync job." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json index 394e6e2fcb38f..c6fbd15559e2d 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.error.json @@ -1,7 +1,7 @@ { "connector_sync_job.error": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-error-api.html", "description": "Sets an error for a connector sync job." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json index d0f14b0001bd8..6dd29069badc4 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.get.json @@ -1,7 +1,7 @@ { "connector_sync_job.get": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-connector-sync-job-api.html", "description": "Returns the details about a connector sync job." 
}, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json index 86995477f060a..7b816cae1cd00 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.list.json @@ -1,7 +1,7 @@ { "connector_sync_job.list": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-connector-sync-jobs-api.html", "description": "Lists all connector sync jobs." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json index 1db58c31dfa38..8050b34014d2c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.post.json @@ -1,7 +1,7 @@ { "connector_sync_job.post": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/create-connector-sync-job-api.html", "description": "Creates a connector sync job." }, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json index 825e5d8939e2d..d5f18df0a74da 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/connector_sync_job.update_stats.json @@ -1,7 +1,7 @@ { "connector_sync_job.update_stats": { "documentation": { - "url": "https://www.elastic.co/guide/en/enterprise-search/current/connectors.html", + "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/set-connector-sync-job-stats-api.html", "description": "Updates the stats fields in the connector sync job document." 
}, "stability": "experimental", diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json index bf782e96a0499..452ad7cef607c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/security.get_api_key.json @@ -45,6 +45,11 @@ "type":"boolean", "default":false, "description": "flag to show the limited-by role descriptors of API Keys" + }, + "active_only":{ + "type":"boolean", + "default":false, + "description": "flag to limit response to only active (not invalidated or expired) API keys" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json index 00142ebcf00fc..9273a8dea87c3 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonyms-set.html", "description": "Deletes a synonym set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json index 11fb113d6b629..5a0de4ab94a7c 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.delete_synonym_rule.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/delete-synonym-rule.html", "description": "Deletes a synonym rule in a synonym set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json index 6cb4fcc46f26b..25c177cabbdf1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonyms-set.html", "description": "Retrieves a synonym set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json index 5a718f1a48e46..ff9e7eb57b8a7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonym_rule.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/get-synonym-rule.html", "description": "Retrieves a synonym rule from a synonym set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json index 
66bd8df92e1e7..d94bef32cddcd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.get_synonyms_sets.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/list-synonyms-sets.html", "description": "Retrieves a summary of all defined synonym sets" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json index 6c412d174434b..e09bbb7e428a1 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonyms-set.html", "description": "Creates or updates a synonyms set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json index 082432ae662f0..51503b5819862 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/synonyms.put_synonym_rule.json @@ -4,7 +4,7 @@ "url": "https://www.elastic.co/guide/en/elasticsearch/reference/master/put-synonym-rule.html", "description": "Creates or updates a synonym rule in a synonym set" }, - "stability": "experimental", + "stability": "stable", "visibility": "public", "headers": { "accept": [ diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java index 45906abd29ff8..5fa63aaed0508 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/HotThreadsIT.java @@ -11,8 +11,9 @@ import org.apache.lucene.util.Constants; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; +import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; +import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.logging.ChunkedLoggingStreamTests; import org.elasticsearch.core.TimeValue; @@ -23,7 +24,7 @@ import java.util.Map; import java.util.concurrent.CountDownLatch; -import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static org.elasticsearch.index.query.QueryBuilders.boolQuery; @@ -41,38 +42,26 @@ public class HotThreadsIT extends ESIntegTestCase { - public void testHotThreadsDontFail() throws ExecutionException, InterruptedException { - /** - * This test just checks if nothing crashes or gets stuck etc. 
- */ + public void testHotThreadsDontFail() throws InterruptedException { + // This test just checks if nothing crashes or gets stuck etc. createIndex("test"); final int iters = scaledRandomIntBetween(2, 20); final AtomicBoolean hasErrors = new AtomicBoolean(false); for (int i = 0; i < iters; i++) { - final String type; - NodesHotThreadsRequestBuilder nodesHotThreadsRequestBuilder = clusterAdmin().prepareNodesHotThreads(); + final NodesHotThreadsRequest request = new NodesHotThreadsRequest(); if (randomBoolean()) { TimeValue timeValue = new TimeValue(rarely() ? randomIntBetween(500, 5000) : randomIntBetween(20, 500)); - nodesHotThreadsRequestBuilder.setInterval(timeValue); + request.interval(timeValue); } if (randomBoolean()) { - nodesHotThreadsRequestBuilder.setThreads(rarely() ? randomIntBetween(500, 5000) : randomIntBetween(1, 500)); + request.threads(rarely() ? randomIntBetween(500, 5000) : randomIntBetween(1, 500)); } - nodesHotThreadsRequestBuilder.setIgnoreIdleThreads(randomBoolean()); + request.ignoreIdleThreads(randomBoolean()); if (randomBoolean()) { - type = switch (randomIntBetween(0, 3)) { - case 3 -> "mem"; - case 2 -> "cpu"; - case 1 -> "wait"; - default -> "block"; - }; - assertThat(type, notNullValue()); - nodesHotThreadsRequestBuilder.setType(HotThreads.ReportType.of(type)); - } else { - type = null; + request.type(HotThreads.ReportType.of(randomFrom("block", "mem", "cpu", "wait"))); } final CountDownLatch latch = new CountDownLatch(1); - nodesHotThreadsRequestBuilder.execute(new ActionListener<NodesHotThreadsResponse>() { + client().execute(TransportNodesHotThreadsAction.TYPE, request, new ActionListener<>() { @Override public void onResponse(NodesHotThreadsResponse nodeHotThreads) { boolean success = false; @@ -83,7 +72,6 @@ public void onResponse(NodesHotThreadsResponse nodeHotThreads) { assertThat(nodesMap.size(), equalTo(cluster().size())); for (NodeHotThreads ht : nodeHotThreads.getNodes()) { assertNotNull(ht.getHotThreads()); - // logger.info(ht.getHotThreads()); } success = true; } finally { @@ -120,40 +108,39 @@ public void onFailure(Exception e) { 3L ); } - latch.await(); + safeAwait(latch); assertThat(hasErrors.get(), is(false)); } } - public void testIgnoreIdleThreads() throws ExecutionException, InterruptedException { + public void testIgnoreIdleThreads() { assumeTrue("no support for hot_threads on FreeBSD", Constants.FREE_BSD == false); // First time, don't ignore idle threads: - NodesHotThreadsRequestBuilder builder = clusterAdmin().prepareNodesHotThreads(); - builder.setIgnoreIdleThreads(false); - builder.setThreads(Integer.MAX_VALUE); - NodesHotThreadsResponse response = builder.execute().get(); + final NodesHotThreadsResponse firstResponse = client().execute( + TransportNodesHotThreadsAction.TYPE, + new NodesHotThreadsRequest().ignoreIdleThreads(false).threads(Integer.MAX_VALUE) + ).actionGet(10, TimeUnit.SECONDS); final Matcher<String> containsCachedTimeThreadRunMethod = containsString( "org.elasticsearch.threadpool.ThreadPool$CachedTimeThread.run" ); int totSizeAll = 0; - for (NodeHotThreads node : response.getNodesMap().values()) { + for (NodeHotThreads node : firstResponse.getNodesMap().values()) { totSizeAll += node.getHotThreads().length(); assertThat(node.getHotThreads(), containsCachedTimeThreadRunMethod); } // Second time, do ignore idle threads: - builder = clusterAdmin().prepareNodesHotThreads(); - builder.setThreads(Integer.MAX_VALUE); - + final var request = new NodesHotThreadsRequest().threads(Integer.MAX_VALUE); // Make sure default is true: - assertEquals(true,
builder.request().ignoreIdleThreads()); - response = builder.execute().get(); + assertTrue(request.ignoreIdleThreads()); + final NodesHotThreadsResponse secondResponse = client().execute(TransportNodesHotThreadsAction.TYPE, request) + .actionGet(10, TimeUnit.SECONDS); int totSizeIgnoreIdle = 0; - for (NodeHotThreads node : response.getNodesMap().values()) { + for (NodeHotThreads node : secondResponse.getNodesMap().values()) { totSizeIgnoreIdle += node.getHotThreads().length(); assertThat(node.getHotThreads(), not(containsCachedTimeThreadRunMethod)); } @@ -162,22 +149,26 @@ public void testIgnoreIdleThreads() throws ExecutionException, InterruptedExcept assertThat(totSizeIgnoreIdle, lessThan(totSizeAll)); } - public void testTimestampAndParams() throws ExecutionException, InterruptedException { + public void testTimestampAndParams() { - NodesHotThreadsResponse response = clusterAdmin().prepareNodesHotThreads().execute().get(); + final NodesHotThreadsResponse response = client().execute(TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()) + .actionGet(10, TimeUnit.SECONDS); if (Constants.FREE_BSD) { for (NodeHotThreads node : response.getNodesMap().values()) { - String result = node.getHotThreads(); - assertTrue(result.indexOf("hot_threads is not supported") != -1); + assertThat(node.getHotThreads(), containsString("hot_threads is not supported")); } } else { for (NodeHotThreads node : response.getNodesMap().values()) { - String result = node.getHotThreads(); - assertTrue(result.indexOf("Hot threads at") != -1); - assertTrue(result.indexOf("interval=500ms") != -1); - assertTrue(result.indexOf("busiestThreads=3") != -1); - assertTrue(result.indexOf("ignoreIdleThreads=true") != -1); + assertThat( + node.getHotThreads(), + allOf( + containsString("Hot threads at"), + containsString("interval=500ms"), + containsString("busiestThreads=3"), + containsString("ignoreIdleThreads=true") + ) + ); } } } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java index ad17e4f0d49dd..4aa3598608fb6 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/ReloadSecureSettingsIT.java @@ -11,10 +11,13 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureSetting; import org.elasticsearch.common.settings.SecureSettings; import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.plugins.Plugin; @@ -43,6 +46,8 @@ @ESIntegTestCase.ClusterScope(minNumDataNodes = 2) public class ReloadSecureSettingsIT extends ESIntegTestCase { + private static final String VALID_SECURE_SETTING_NAME = "some.setting.that.exists"; + @BeforeClass public static void disableInFips() { // Reload secure settings with a password protected keystore is tested in ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT @@ -350,9 +355,46 @@ public void 
testReloadWhileKeystoreChanged() throws Exception { } } + public void testInvalidKeyInSettings() throws Exception { + final Environment environment = internalCluster().getInstance(Environment.class); + + try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { + keyStoreWrapper.setString(VALID_SECURE_SETTING_NAME, new char[0]); + keyStoreWrapper.save(environment.configFile(), new char[0], false); + } + + PlainActionFuture<NodesReloadSecureSettingsResponse> actionFuture = new PlainActionFuture<>(); + clusterAdmin().prepareReloadSecureSettings() + .setSecureStorePassword(new SecureString(new char[0])) + .setNodesIds(Strings.EMPTY_ARRAY) + .execute(actionFuture); + + actionFuture.get().getNodes().forEach(nodeResponse -> assertThat(nodeResponse.reloadException(), nullValue())); + + try (KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create()) { + assertThat(keyStoreWrapper, notNullValue()); + keyStoreWrapper.setString("some.setting.that.does.not.exist", new char[0]); + keyStoreWrapper.save(environment.configFile(), new char[0], false); + } + + actionFuture = new PlainActionFuture<>(); + clusterAdmin().prepareReloadSecureSettings() + .setSecureStorePassword(new SecureString(new char[0])) + .setNodesIds(Strings.EMPTY_ARRAY) + .execute(actionFuture); + + actionFuture.get() + .getNodes() + .forEach(nodeResponse -> assertThat(nodeResponse.reloadException(), instanceOf(IllegalArgumentException.class))); + } + @Override protected Collection<Class<? extends Plugin>> nodePlugins() { - final List<Class<? extends Plugin>> plugins = Arrays.asList(MockReloadablePlugin.class, MisbehavingReloadablePlugin.class); + final List<Class<? extends Plugin>> plugins = Arrays.asList( + MockWithSecureSettingPlugin.class, + MockReloadablePlugin.class, + MisbehavingReloadablePlugin.class + ); // shuffle as reload is called in order Collections.shuffle(plugins, random()); return plugins; @@ -455,4 +497,10 @@ public synchronized void setShouldThrow(boolean shouldThrow) { } } + public static class MockWithSecureSettingPlugin extends Plugin { + public List<Setting<?>> getSettings() { + return List.of(SecureSetting.secureString(VALID_SECURE_SETTING_NAME, null)); + } + }; + } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java index 93fc17a9a02eb..1fda9c67a0beb 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/stats/ClusterStatsIT.java @@ -207,7 +207,7 @@ public void testValuesSmokeScreen() throws IOException, ExecutionException, Inte ClusterStatsResponse response = clusterAdmin().prepareClusterStats().get(); String msg = response.toString(); assertThat(msg, response.getTimestamp(), greaterThan(946681200000L)); // 1 Jan 2000 - assertThat(msg, response.indicesStats.getStore().getSizeInBytes(), greaterThan(0L)); + assertThat(msg, response.indicesStats.getStore().sizeInBytes(), greaterThan(0L)); assertThat(msg, response.nodesStats.getFs().getTotal().getBytes(), greaterThan(0L)); assertThat(msg, response.nodesStats.getJvm().getVersions().size(), greaterThan(0)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java index 595788b1eb9f5..eaf8948348684 100644 ---
a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/cluster/tasks/PendingTasksBlocksIT.java @@ -37,7 +37,7 @@ public void testPendingTasksWithIndexBlocks() { )) { try { enableIndexBlock("test", blockSetting); - PendingClusterTasksResponse response = clusterAdmin().preparePendingClusterTasks().get(); + PendingClusterTasksResponse response = getClusterPendingTasks(); assertNotNull(response.pendingTasks()); } finally { disableIndexBlock("test", blockSetting); @@ -53,7 +53,7 @@ public void testPendingTasksWithClusterReadOnlyBlock() { try { setClusterReadOnly(true); - PendingClusterTasksResponse response = clusterAdmin().preparePendingClusterTasks().get(); + PendingClusterTasksResponse response = getClusterPendingTasks(); assertNotNull(response.pendingTasks()); } finally { setClusterReadOnly(false); @@ -80,7 +80,7 @@ public boolean validateClusterForming() { } }); - assertNotNull(clusterAdmin().preparePendingClusterTasks().get().pendingTasks()); + assertNotNull(getClusterPendingTasks().pendingTasks()); // starting one more node allows the cluster to recover internalCluster().startNode(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java index 310f9394f60c1..dbc124d40a591 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/admin/indices/shards/IndicesShardStoreRequestIT.java @@ -108,8 +108,13 @@ public void testIndices() throws Exception { String index1 = "test1"; String index2 = "test2"; internalCluster().ensureAtLeastNumDataNodes(2); - assertAcked(prepareCreate(index1).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "2"))); - assertAcked(prepareCreate(index2).setSettings(Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, "2"))); + for (final var index : List.of(index1, index2)) { + final var settings = Settings.builder().put(IndexMetadata.SETTING_NUMBER_OF_SHARDS, 2); + if (randomBoolean()) { + settings.put(IndexMetadata.SETTING_INDEX_HIDDEN, randomBoolean()); + } + assertAcked(prepareCreate(index).setSettings(settings)); + } indexRandomData(index1); indexRandomData(index2); ensureGreen(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java index 21bbd32e6bf26..10fe7982948ba 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/action/search/PointInTimeIT.java @@ -83,7 +83,7 @@ public void testBasic() { } refresh("test"); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); - assertResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { + assertResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp1 -> { assertThat(resp1.pointInTimeId(), equalTo(pitId)); assertHitCount(resp1, numDocs); }); @@ -99,13 +99,13 @@ public void testBasic() { if (randomBoolean()) { final int delDocCount = deletedDocs; assertNoFailuresAndResponse( - 
prepareSearch("test").setPreference(null).setQuery(new MatchAllQueryBuilder()), + prepareSearch("test").setQuery(new MatchAllQueryBuilder()), resp2 -> assertHitCount(resp2, numDocs - delDocCount) ); } try { assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setQuery(new MatchAllQueryBuilder()).setPointInTime(new PointInTimeBuilder(pitId)), resp3 -> { assertHitCount(resp3, numDocs); assertThat(resp3.pointInTimeId(), equalTo(pitId)); @@ -131,7 +131,7 @@ public void testMultipleIndices() { String pitId = openPointInTime(new String[] { "*" }, TimeValue.timeValueMinutes(2)); try { int moreDocs = randomIntBetween(10, 50); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -143,7 +143,7 @@ public void testMultipleIndices() { refresh(); }); assertNoFailuresAndResponse(prepareSearch(), resp -> assertHitCount(resp, numDocs + moreDocs)); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertNotNull(resp.pointInTimeId()); assertThat(resp.pointInTimeId(), equalTo(pitId)); @@ -212,7 +212,7 @@ public void testRelocation() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test" }, TimeValue.timeValueMinutes(2)); try { - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -232,7 +232,7 @@ public void testRelocation() throws Exception { } refresh(); } - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -245,7 +245,7 @@ public void testRelocation() throws Exception { .collect(Collectors.toSet()); assertThat(assignedNodes, everyItem(not(in(excludedNodes)))); }, 30, TimeUnit.SECONDS); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); @@ -263,7 +263,7 @@ public void testPointInTimeNotFound() throws Exception { } refresh(); String pit = openPointInTime(new String[] { "index" }, TimeValue.timeValueSeconds(5)); - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pit)), resp1 -> { assertHitCount(resp1, index1); if (rarely()) { try { @@ -280,7 +280,7 @@ public void testPointInTimeNotFound() throws Exception { }); SearchPhaseExecutionException e = expectThrows( 
SearchPhaseExecutionException.class, - () -> prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)).get() + () -> prepareSearch().setPointInTime(new PointInTimeBuilder(pit)).get() ); for (ShardSearchFailure failure : e.shardFailures()) { assertThat(ExceptionsHelper.unwrapCause(failure.getCause()), instanceOf(SearchContextMissingException.class)); @@ -306,7 +306,7 @@ public void testIndexNotFound() { String pit = openPointInTime(new String[] { "index-*" }, TimeValue.timeValueMinutes(2)); try { assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pit)), + prepareSearch().setPointInTime(new PointInTimeBuilder(pit)), resp -> assertHitCount(resp, index1 + index2) ); indicesAdmin().prepareDelete("index-1").get(); @@ -315,21 +315,15 @@ public void testIndexNotFound() { } // Allow partial search result - assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), - resp -> { - assertFailures(resp); - assertHitCount(resp, index2); - } - ); + assertResponse(prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pit)), resp -> { + assertFailures(resp); + assertHitCount(resp, index2); + }); // Do not allow partial search result expectThrows( ElasticsearchException.class, - () -> prepareSearch().setPreference(null) - .setAllowPartialSearchResults(false) - .setPointInTime(new PointInTimeBuilder(pit)) - .get() + () -> prepareSearch().setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pit)).get() ); } finally { closePointInTime(pit); @@ -365,7 +359,6 @@ public void testCanMatch() throws Exception { assertResponse( prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2020-01-02").lte("2020-01-03")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(randomIntBetween(2, 3)) .setMaxConcurrentShardRequests(randomIntBetween(1, 2)) .setPointInTime(new PointInTimeBuilder(pitId)), @@ -422,20 +415,17 @@ public void testPartialResults() throws Exception { refresh(); String pitId = openPointInTime(new String[] { "test-*" }, TimeValue.timeValueMinutes(2)); try { - assertNoFailuresAndResponse(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { assertHitCount(resp, numDocs1 + numDocs2); assertThat(resp.pointInTimeId(), equalTo(pitId)); }); internalCluster().restartNode(assignedNodeForIndex1); - assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), - resp -> { - assertFailures(resp); - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, numDocs2); - } - ); + assertResponse(prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertFailures(resp); + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, numDocs2); + }); } finally { closePointInTime(pitId); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java index 214e3f73144d9..7a8accf8cc7ce 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java +++ 
b/server/src/internalClusterTest/java/org/elasticsearch/cluster/ClusterHealthIT.java @@ -46,8 +46,8 @@ public void testSimpleLocalHealth() { .prepareHealth() .setLocal(true) .setWaitForEvents(Priority.LANGUID) - .setTimeout("30s") - .get("10s"); + .setTimeout(TimeValue.timeValueSeconds(30)) + .get(TimeValue.timeValueSeconds(10)); logger.info("--> got cluster health on [{}]", node); assertFalse("timed out on " + node, health.isTimedOut()); assertThat("health status on " + node, health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java index b1ac5b02f7dd2..c044fafe31efc 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderIT.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotResponse; import org.elasticsearch.action.admin.indices.stats.ShardStats; -import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.cluster.ClusterInfoService; import org.elasticsearch.cluster.ClusterInfoServiceUtils; import org.elasticsearch.cluster.DiskUsageIntegTestCase; @@ -39,13 +38,16 @@ import org.hamcrest.TypeSafeMatcher; import java.util.Arrays; +import java.util.Comparator; import java.util.HashSet; -import java.util.Locale; +import java.util.List; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; -import static java.util.stream.Collectors.toMap; import static java.util.stream.Collectors.toSet; +import static org.elasticsearch.cluster.routing.RoutingNodesHelper.numberOfShardsWithState; +import static org.elasticsearch.cluster.routing.allocation.decider.EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING; import static org.elasticsearch.index.store.Store.INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.empty; @@ -74,26 +76,25 @@ public void testHighWatermarkNotExceeded() throws Exception { final String dataNodeName = internalCluster().startDataOnlyNode(); ensureStableCluster(3); - final InternalClusterInfoService clusterInfoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance( - ClusterInfoService.class - ); - internalCluster().getCurrentMasterNodeInstance(ClusterService.class) - .addListener(event -> ClusterInfoServiceUtils.refresh(clusterInfoService)); + final InternalClusterInfoService clusterInfoService = getInternalClusterInfoService(); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class).addListener(event -> { + ClusterInfoServiceUtils.refresh(clusterInfoService); + }); final String dataNode0Id = internalCluster().getInstance(NodeEnvironment.class, dataNodeName).nodeId(); - final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + final String indexName = randomIdentifier(); createIndex(indexName, indexSettings(6, 0).put(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), "0ms").build()); - var smallestShard = createReasonableSizedShards(indexName); + var 
shardSizes = createReasonableSizedShards(indexName); // reduce disk size of node 0 so that no shards fit below the high watermark, forcing all shards onto the other data node // (subtract the translog size since the disk threshold decider ignores this and may therefore move the shard back again) - getTestFileStore(dataNodeName).setTotalSpace(smallestShard.size + WATERMARK_BYTES - 1L); + getTestFileStore(dataNodeName).setTotalSpace(shardSizes.getSmallestShardSize() + WATERMARK_BYTES - 1L); assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, empty()); // increase disk size of node 0 to allow just enough room for one shard, and check that it's rebalanced back - getTestFileStore(dataNodeName).setTotalSpace(smallestShard.size + WATERMARK_BYTES); - assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, new ContainsExactlyOneOf<>(smallestShard.shardIds)); + getTestFileStore(dataNodeName).setTotalSpace(shardSizes.getSmallestShardSize() + WATERMARK_BYTES); + assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, new ContainsExactlyOneOf<>(shardSizes.getSmallestShardIds())); } public void testRestoreSnapshotAllocationDoesNotExceedWatermark() throws Exception { @@ -108,17 +109,20 @@ public void testRestoreSnapshotAllocationDoesNotExceedWatermark() throws Excepti .setSettings(Settings.builder().put("location", randomRepoPath()).put("compress", randomBoolean())) ); - final InternalClusterInfoService clusterInfoService = (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance( - ClusterInfoService.class - ); - internalCluster().getCurrentMasterNodeInstance(ClusterService.class) - .addListener(event -> ClusterInfoServiceUtils.refresh(clusterInfoService)); + final AtomicBoolean allowRelocations = new AtomicBoolean(true); + final InternalClusterInfoService clusterInfoService = getInternalClusterInfoService(); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class).addListener(event -> { + ClusterInfoServiceUtils.refresh(clusterInfoService); + if (allowRelocations.get() == false) { + assertThat(numberOfShardsWithState(event.state().getRoutingNodes(), ShardRoutingState.RELOCATING), equalTo(0)); + } + }); final String dataNode0Id = internalCluster().getInstance(NodeEnvironment.class, dataNodeName).nodeId(); - final String indexName = randomAlphaOfLength(10).toLowerCase(Locale.ROOT); + final String indexName = randomIdentifier(); createIndex(indexName, indexSettings(6, 0).put(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), "0ms").build()); - var smallestShard = createReasonableSizedShards(indexName); + var shardSizes = createReasonableSizedShards(indexName); final CreateSnapshotResponse createSnapshotResponse = clusterAdmin().prepareCreateSnapshot("repo", "snap") .setWaitForCompletion(true) @@ -128,15 +132,13 @@ public void testRestoreSnapshotAllocationDoesNotExceedWatermark() throws Excepti assertThat(snapshotInfo.state(), is(SnapshotState.SUCCESS)); assertAcked(indicesAdmin().prepareDelete(indexName).get()); + updateClusterSettings(Settings.builder().put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE.toString())); + allowRelocations.set(false); // reduce disk size of node 0 so that no shards fit below the low watermark, forcing shards to be assigned to the other data node - getTestFileStore(dataNodeName).setTotalSpace(smallestShard.size + WATERMARK_BYTES - 1L); + getTestFileStore(dataNodeName).setTotalSpace(shardSizes.getSmallestShardSize() + WATERMARK_BYTES - 1L); refreshDiskUsage(); - updateClusterSettings( - 
Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE.toString()) - ); - final RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot("repo", "snap") .setWaitForCompletion(true) .get(); @@ -144,13 +146,71 @@ public void testRestoreSnapshotAllocationDoesNotExceedWatermark() throws Excepti assertThat(restoreInfo.successfulShards(), is(snapshotInfo.totalShards())); assertThat(restoreInfo.failedShards(), is(0)); - assertBusy(() -> assertThat(getShardIds(dataNode0Id, indexName), empty())); + assertThat(getShardIds(dataNode0Id, indexName), empty()); - updateClusterSettings(Settings.builder().putNull(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())); + allowRelocations.set(true); + updateClusterSettings(Settings.builder().putNull(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey())); // increase disk size of node 0 to allow just enough room for one shard, and check that it's rebalanced back - getTestFileStore(dataNodeName).setTotalSpace(smallestShard.size + WATERMARK_BYTES); - assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, new ContainsExactlyOneOf<>(smallestShard.shardIds)); + getTestFileStore(dataNodeName).setTotalSpace(shardSizes.getSmallestShardSize() + WATERMARK_BYTES); + assertBusyWithDiskUsageRefresh(dataNode0Id, indexName, new ContainsExactlyOneOf<>(shardSizes.getSmallestShardIds())); + } + + public void testRestoreSnapshotAllocationDoesNotExceedWatermarkWithMultipleShards() throws Exception { + internalCluster().startMasterOnlyNode(); + internalCluster().startDataOnlyNode(); + final String dataNodeName = internalCluster().startDataOnlyNode(); + ensureStableCluster(3); + + assertAcked( + clusterAdmin().preparePutRepository("repo") + .setType(FsRepository.TYPE) + .setSettings(Settings.builder().put("location", randomRepoPath()).put("compress", randomBoolean())) + ); + + final AtomicBoolean allowRelocations = new AtomicBoolean(true); + final InternalClusterInfoService clusterInfoService = getInternalClusterInfoService(); + internalCluster().getCurrentMasterNodeInstance(ClusterService.class).addListener(event -> { + ClusterInfoServiceUtils.refresh(clusterInfoService); + if (allowRelocations.get() == false) { + assertThat(numberOfShardsWithState(event.state().getRoutingNodes(), ShardRoutingState.RELOCATING), equalTo(0)); + } + }); + + final String dataNode0Id = internalCluster().getInstance(NodeEnvironment.class, dataNodeName).nodeId(); + + final String indexName = randomIdentifier(); + createIndex(indexName, indexSettings(6, 0).put(INDEX_STORE_STATS_REFRESH_INTERVAL_SETTING.getKey(), "0ms").build()); + var shardSizes = createReasonableSizedShards(indexName); + + final CreateSnapshotResponse createSnapshotResponse = clusterAdmin().prepareCreateSnapshot("repo", "snap") + .setWaitForCompletion(true) + .get(); + final SnapshotInfo snapshotInfo = createSnapshotResponse.getSnapshotInfo(); + assertThat(snapshotInfo.successfulShards(), is(snapshotInfo.totalShards())); + assertThat(snapshotInfo.state(), is(SnapshotState.SUCCESS)); + + assertAcked(indicesAdmin().prepareDelete(indexName).get()); + updateClusterSettings(Settings.builder().put(CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), Rebalance.NONE.toString())); + allowRelocations.set(false); + + // reduce disk size of node 0 so that only 1 of 2 smallest shards can be allocated + var usableSpace = shardSizes.sizes().get(1).size(); + getTestFileStore(dataNodeName).setTotalSpace(usableSpace + WATERMARK_BYTES + 1L); + 
refreshDiskUsage(); + + final RestoreSnapshotResponse restoreSnapshotResponse = clusterAdmin().prepareRestoreSnapshot("repo", "snap") + .setWaitForCompletion(true) + .get(); + final RestoreInfo restoreInfo = restoreSnapshotResponse.getRestoreInfo(); + assertThat(restoreInfo.successfulShards(), is(snapshotInfo.totalShards())); + assertThat(restoreInfo.failedShards(), is(0)); + + assertBusyWithDiskUsageRefresh( + dataNode0Id, + indexName, + new ContainsExactlyOneOf<>(shardSizes.getShardIdsWithSizeSmallerOrEqual(usableSpace)) + ); } private Set<ShardId> getShardIds(final String nodeId, final String indexName) { @@ -178,13 +238,9 @@ private Set<ShardId> getShardIds(final String nodeId, final String indexName) { /** * Index documents until all the shards are at least WATERMARK_BYTES in size, and return the one with the smallest size */ - private SmallestShards createReasonableSizedShards(final String indexName) throws InterruptedException { + private ShardSizes createReasonableSizedShards(final String indexName) throws InterruptedException { while (true) { - final IndexRequestBuilder[] indexRequestBuilders = new IndexRequestBuilder[scaledRandomIntBetween(100, 10000)]; - for (int i = 0; i < indexRequestBuilders.length; i++) { - indexRequestBuilders[i] = prepareIndex(indexName).setSource("field", randomAlphaOfLength(10)); - } - indexRandom(true, indexRequestBuilders); + indexRandom(true, indexName, scaledRandomIntBetween(100, 10000)); forceMerge(); refresh(); @@ -201,23 +257,36 @@ private SmallestShards createReasonableSizedShards(final String indexName) throw .orElseThrow(() -> new AssertionError("no shards")); if (smallestShardSize > WATERMARK_BYTES) { - var smallestShardIds = Arrays.stream(shardStates) - .filter(it -> it.getStats().getStore().sizeInBytes() == smallestShardSize) - .map(it -> removeIndexUUID(it.getShardRouting().shardId())) - .collect(toSet()); - - logger.info( - "Created shards with sizes {}", - Arrays.stream(shardStates) - .collect(toMap(it -> it.getShardRouting().shardId(), it -> it.getStats().getStore().sizeInBytes())) - ); - - return new SmallestShards(smallestShardSize, smallestShardIds); + var shardSizes = Arrays.stream(shardStates) + .map(it -> new ShardSize(removeIndexUUID(it.getShardRouting().shardId()), it.getStats().getStore().sizeInBytes())) + .sorted(Comparator.comparing(ShardSize::size)) + .toList(); + logger.info("Created shards with sizes {}", shardSizes); + return new ShardSizes(shardSizes); } } } - private record SmallestShards(long size, Set<ShardId> shardIds) {} + private record ShardSizes(List<ShardSize> sizes) { + + public long getSmallestShardSize() { + return sizes.get(0).size(); + } + + public Set<ShardId> getShardIdsWithSizeSmallerOrEqual(long size) { + return sizes.stream().filter(entry -> entry.size <= size).map(ShardSize::shardId).collect(toSet()); + } + + public Set<ShardId> getSmallestShardIds() { + return getShardIdsWithSizeSmallerOrEqual(getSmallestShardSize()); + } + + public Set<ShardId> getAllShardIds() { + return sizes.stream().map(ShardSize::shardId).collect(toSet()); + } + } + + private record ShardSize(ShardId shardId, long size) {} private static ShardId removeIndexUUID(ShardId shardId) { return ShardId.fromString(shardId.toString()); @@ -246,16 +315,20 @@ private void refreshDiskUsage() { ); } - private void assertBusyWithDiskUsageRefresh(String nodeName, String indexName, Matcher<Iterable<ShardId>> matcher) throws Exception { + private void assertBusyWithDiskUsageRefresh(String nodeId, String indexName, Matcher<Iterable<ShardId>> matcher) throws Exception { assertBusy(() -> { // refresh the master's ClusterInfoService before checking
the assigned shards because DiskThresholdMonitor might still // be processing a previous ClusterInfo update and will skip the new one (see DiskThresholdMonitor#onNewInfo(ClusterInfo) // and its internal checkInProgress flag) refreshDiskUsage(); - final Set<ShardId> shardRoutings = getShardIds(nodeName, indexName); + final Set<ShardId> shardRoutings = getShardIds(nodeId, indexName); assertThat("Mismatching shard routings: " + shardRoutings, shardRoutings, matcher); - }, 30L, TimeUnit.SECONDS); + }, 5L, TimeUnit.SECONDS); + } + + private InternalClusterInfoService getInternalClusterInfoService() { + return (InternalClusterInfoService) internalCluster().getCurrentMasterNodeInstance(ClusterInfoService.class); } private static final class ContainsExactlyOneOf<T> extends TypeSafeMatcher<Iterable<T>> { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/cluster/service/ClusterServiceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/cluster/service/ClusterServiceIT.java index 873f8083f4e0c..fde465346d4be 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/cluster/service/ClusterServiceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/cluster/service/ClusterServiceIT.java @@ -357,7 +357,7 @@ public void clusterStateProcessed(ClusterState oldState, ClusterState newState) assertTrue(controlSources.isEmpty()); controlSources = new HashSet<>(Arrays.asList("1", "2", "3", "4", "5", "6", "7", "8", "9", "10")); - PendingClusterTasksResponse response = internalCluster().coordOnlyNodeClient().admin().cluster().preparePendingClusterTasks().get(); + PendingClusterTasksResponse response = getClusterPendingTasks(internalCluster().coordOnlyNodeClient()); assertThat(response.pendingTasks().size(), greaterThanOrEqualTo(10)); assertThat(response.pendingTasks().get(0).getSource().string(), equalTo("1")); assertThat(response.pendingTasks().get(0).isExecuting(), equalTo(true)); @@ -419,7 +419,7 @@ public void onFailure(Exception e) { } assertTrue(controlSources.isEmpty()); - response = internalCluster().coordOnlyNodeClient().admin().cluster().preparePendingClusterTasks().get(); + response = getClusterPendingTasks(internalCluster().coordOnlyNodeClient()); assertThat(response.pendingTasks().size(), greaterThanOrEqualTo(5)); controlSources = new HashSet<>(Arrays.asList("1", "2", "3", "4", "5")); for (PendingClusterTask task : response.pendingTasks()) { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java index 4aabf0ac66a32..a0efb81c18668 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/discovery/ClusterDisruptionIT.java @@ -109,7 +109,7 @@ static ConflictMode randomMode() { public void testAckedIndexing() throws Exception { final int seconds = (TEST_NIGHTLY && rarely()) == false ? 1 : 5; - final String timeout = seconds + "s"; + final TimeValue timeout = TimeValue.timeValueSeconds(seconds); final List<String> nodes = startCluster(rarely() ?
5 : 3); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java index ec79b53ccd174..c1da93140a0b0 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/index/store/CorruptedFileIT.java @@ -182,11 +182,7 @@ public void testCorruptFileAndRecover() throws InterruptedException, IOException .waitForNoRelocatingShards(true) ).actionGet(); if (health.isTimedOut()) { - logger.info( - "cluster state:\n{}\n{}", - clusterAdmin().prepareState().get().getState(), - clusterAdmin().preparePendingClusterTasks().get() - ); + logger.info("cluster state:\n{}\n{}", clusterAdmin().prepareState().get().getState(), getClusterPendingTasks()); assertThat("timed out waiting for green state", health.isTimedOut(), equalTo(false)); } assertThat(health.getStatus(), equalTo(ClusterHealthStatus.GREEN)); @@ -295,11 +291,7 @@ public void testCorruptPrimaryNoReplica() throws ExecutionException, Interrupted if (response.getStatus() != ClusterHealthStatus.RED) { logger.info("Cluster turned red in busy loop: {}", didClusterTurnRed); - logger.info( - "cluster state:\n{}\n{}", - clusterAdmin().prepareState().get().getState(), - clusterAdmin().preparePendingClusterTasks().get() - ); + logger.info("cluster state:\n{}\n{}", clusterAdmin().prepareState().get().getState(), getClusterPendingTasks()); } assertThat(response.getStatus(), is(ClusterHealthStatus.RED)); ClusterState state = clusterAdmin().prepareState().get().getState(); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java index 0f7ca38ca8f6b..984082ec65193 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/mapping/SimpleGetFieldMappingsIT.java @@ -159,7 +159,9 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { String responseStrings = Strings.toString(responseBuilder); XContentBuilder prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); - prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); + try (var parser = createParser(JsonXContent.jsonXContent, responseStrings)) { + prettyJsonBuilder.copyCurrentStructure(parser); + } assertThat(responseStrings, equalTo(Strings.toString(prettyJsonBuilder))); params.put("pretty", "false"); @@ -170,7 +172,9 @@ public void testSimpleGetFieldMappingsWithPretty() throws Exception { responseStrings = Strings.toString(responseBuilder); prettyJsonBuilder = XContentFactory.jsonBuilder().prettyPrint(); - prettyJsonBuilder.copyCurrentStructure(createParser(JsonXContent.jsonXContent, responseStrings)); + try (var parser = createParser(JsonXContent.jsonXContent, responseStrings)) { + prettyJsonBuilder.copyCurrentStructure(parser); + } assertThat(responseStrings, not(equalTo(Strings.toString(prettyJsonBuilder)))); } diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java index 0fe5845e9ed32..779072272e59a 100644 --- 
a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexPrimaryRelocationIT.java @@ -8,22 +8,23 @@ package org.elasticsearch.indices.recovery; +import org.apache.logging.log4j.Level; import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.elasticsearch.common.Priority; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.hamcrest.ElasticsearchAssertions; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; -import java.util.stream.Collectors; @ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST) public class IndexPrimaryRelocationIT extends ESIntegTestCase { @@ -71,20 +72,14 @@ public void run() { .setWaitForNoRelocatingShards(true) .get(); if (clusterHealthResponse.isTimedOut()) { - final String hotThreads = clusterAdmin().prepareNodesHotThreads() - .setIgnoreIdleThreads(false) - .get() - .getNodes() - .stream() - .map(NodeHotThreads::getHotThreads) - .collect(Collectors.joining("\n")); - final ClusterState clusterState = clusterAdmin().prepareState().get().getState(); - logger.info( - "timed out for waiting for relocation iteration [{}] \ncluster state {} \nhot threads {}", - i, - clusterState, - hotThreads + HotThreads.logLocalHotThreads( + logger, + Level.INFO, + "timed out waiting for relocation iteration [" + i + "]", + ReferenceDocs.LOGGING ); + final ClusterState clusterState = clusterAdmin().prepareState().get().getState(); + logger.info("timed out for waiting for relocation iteration [{}] \ncluster state {}", i, clusterState); finished.set(true); indexingThread.join(); throw new AssertionError("timed out waiting for relocation iteration [" + i + "] "); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java index d40d2e02415b1..5dcd8b5b0e34f 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/indices/recovery/IndexRecoveryIT.java @@ -1745,12 +1745,12 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() throws Exception { .getNodes() .get(0) .getIndices(); - assertThat(nodeIndicesStats.getStore().getReservedSize().getBytes(), equalTo(0L)); + assertThat(nodeIndicesStats.getStore().reservedSizeInBytes(), equalTo(0L)); assertThat( nodeIndicesStats.getShardStats(clusterState.metadata().index(indexName).getIndex()) .stream() .flatMap(s -> Arrays.stream(s.getShards())) - .map(s -> s.getStats().getStore().getReservedSize().getBytes()) + .map(s -> s.getStats().getStore().reservedSizeInBytes()) .toList(), everyItem(equalTo(StoreStats.UNKNOWN_RESERVED_BYTES)) ); @@ -1766,8 +1766,7 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() 
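The IndexPrimaryRelocationIT hunk above swaps a cluster-wide NodesHotThreadsRequest round-trip (collect per-node strings, join, log) for HotThreads.logLocalHotThreads, which writes the local node's hot threads straight into the log. A sketch of the call shape, using the signature visible in the diff:

```java
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.elasticsearch.common.ReferenceDocs;
import org.elasticsearch.monitor.jvm.HotThreads;

public class HotThreadsLogging {
    private static final Logger logger = LogManager.getLogger(HotThreadsLogging.class);

    // Dumps the local node's hot threads at INFO with a contextual prefix,
    // no transport fan-out required.
    static void onStuckRelocation(int iteration) {
        HotThreads.logLocalHotThreads(
            logger,
            Level.INFO,
            "timed out waiting for relocation iteration [" + iteration + "]",
            ReferenceDocs.LOGGING
        );
    }
}
```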
throws Exception { .get(0) .getIndices() .getStore() - .getReservedSize() - .getBytes(), + .reservedSizeInBytes(), greaterThan(0L) ); } @@ -1785,7 +1784,7 @@ public void testReservesBytesDuringPeerRecoveryPhaseOne() throws Exception { .get() .getNodes() .stream() - .mapToLong(n -> n.getIndices().getStore().getReservedSize().getBytes()) + .mapToLong(n -> n.getIndices().getStore().reservedSizeInBytes()) .sum(), equalTo(0L) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java index 2a4174ba427af..ded319fd0848d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/ingest/IngestStatsNamesAndTypesIT.java @@ -143,7 +143,10 @@ public void testIngestStatsNamesAndTypes() throws IOException { builder.startObject(); response.toXContent(builder, new ToXContent.MapParams(Map.of())); builder.endObject(); - Map stats = createParser(JsonXContent.jsonXContent, Strings.toString(builder)).map(); + Map stats; + try (var parser = createParser(JsonXContent.jsonXContent, Strings.toString(builder))) { + stats = parser.map(); + } int setProcessorCount = path(stats, "nodes.ingest.processor_stats.set.count"); assertThat(setProcessorCount, equalTo(3)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java index 16a570b6cd2fd..dc612d6bad5ce 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/aggregations/pipeline/BucketScriptIT.java @@ -657,10 +657,10 @@ public void testSingleBucketPathAgg() throws Exception { .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), - "seriesArithmetic" - ); + BucketScriptPipelineAggregationBuilder bucketScriptAgg; + try (var parser = createParser(content)) { + bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse(parser, "seriesArithmetic"); + } assertNoFailuresAndResponse( prepareSearch("idx", "idx_unmapped").addAggregation( @@ -703,10 +703,10 @@ public void testArrayBucketPathAgg() throws Exception { .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), - "seriesArithmetic" - ); + BucketScriptPipelineAggregationBuilder bucketScriptAgg; + try (var parser = createParser(content)) { + bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse(parser, "seriesArithmetic"); + } assertNoFailuresAndResponse( prepareSearch("idx", "idx_unmapped").addAggregation( @@ -761,10 +761,10 @@ public void testObjectBucketPathAgg() throws Exception { .field("lang", CustomScriptPlugin.NAME) .endObject() .endObject(); - BucketScriptPipelineAggregationBuilder bucketScriptAgg = BucketScriptPipelineAggregationBuilder.PARSER.parse( - createParser(content), - "seriesArithmetic" - ); + BucketScriptPipelineAggregationBuilder bucketScriptAgg; + try (var parser = createParser(content)) { + bucketScriptAgg = 
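The BucketScriptIT hunks that follow scope the parser the same way before handing it to BucketScriptPipelineAggregationBuilder.PARSER. For readers unfamiliar with that PARSER shape, here is an illustrative declaration (not Elasticsearch's actual one) showing why parse(parser, context) composes cleanly with try-with-resources:

```java
import org.elasticsearch.xcontent.ConstructingObjectParser;
import org.elasticsearch.xcontent.ParseField;
import org.elasticsearch.xcontent.XContentParser;

import java.io.IOException;

import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

// Illustrative stand-in: a tiny ConstructingObjectParser that pulls a typed
// object out of an already-open parser, leaving the caller to manage closing.
record ScriptSpec(String source, String lang) {
    static final ConstructingObjectParser<ScriptSpec, Void> PARSER = new ConstructingObjectParser<>(
        "script_spec",
        args -> new ScriptSpec((String) args[0], (String) args[1])
    );
    static {
        PARSER.declareString(constructorArg(), new ParseField("source"));
        PARSER.declareString(constructorArg(), new ParseField("lang"));
    }

    static ScriptSpec fromXContent(XContentParser parser) throws IOException {
        return PARSER.parse(parser, null);
    }
}
```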
BucketScriptPipelineAggregationBuilder.PARSER.parse(parser, "seriesArithmetic"); + } assertNoFailuresAndResponse( prepareSearch("idx", "idx_unmapped").addAggregation( diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java index d76031d402af0..fb54de209441a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/searchafter/SearchAfterIT.java @@ -473,7 +473,7 @@ public void testScrollAndSearchAfterWithBigIndex() { { OpenPointInTimeRequest openPITRequest = new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueMinutes(5)); pitID = client().execute(TransportOpenPointInTimeAction.TYPE, openPITRequest).actionGet().getPointInTimeId(); - SearchRequest searchRequest = new SearchRequest("test").source( + SearchRequest searchRequest = new SearchRequest().source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitID).setKeepAlive(TimeValue.timeValueMinutes(5))) .sort("timestamp") ); @@ -509,7 +509,7 @@ public void testScrollAndSearchAfterWithBigIndex() { { OpenPointInTimeRequest openPITRequest = new OpenPointInTimeRequest("test").keepAlive(TimeValue.timeValueMinutes(5)); pitID = client().execute(TransportOpenPointInTimeAction.TYPE, openPITRequest).actionGet().getPointInTimeId(); - SearchRequest searchRequest = new SearchRequest("test").source( + SearchRequest searchRequest = new SearchRequest().source( new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitID).setKeepAlive(TimeValue.timeValueMinutes(5))) .sort(SortBuilders.pitTiebreaker()) ); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java index cb13fca85541f..d88a26fefa831 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/simple/SimpleSearchIT.java @@ -459,10 +459,11 @@ public void testTermQueryBigInt() throws Exception { .get(); String queryJson = "{ \"field\" : { \"value\" : 80315953321748200608 } }"; - XContentParser parser = createParser(JsonXContent.jsonXContent, queryJson); - parser.nextToken(); - TermQueryBuilder query = TermQueryBuilder.fromXContent(parser); - assertHitCount(prepareSearch("idx").setQuery(query), 1); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, queryJson)) { + parser.nextToken(); + TermQueryBuilder query = TermQueryBuilder.fromXContent(parser); + assertHitCount(prepareSearch("idx").setQuery(query), 1); + } } public void testTooLongRegexInRegexpQuery() throws Exception { diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java index 93340bedbdae3..c6b913185756a 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/slice/SearchSliceIT.java @@ -222,7 +222,7 @@ private void assertSearchSlicesWithPointInTime(String sliceField, String sortFie for (int id = 0; id < numSlice; id++) { int numSliceResults = 0; - SearchRequestBuilder request = prepareSearch("test").slice(new SliceBuilder(sliceField, id, numSlice)) + 
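The SearchAfterIT changes just below drop the index name from requests that carry a point-in-time id: the indices are implied by the PIT, and (per the SearchRequest#validate change later in this diff) naming them explicitly now fails validation. Sketch of the corrected request shape:

```java
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class PitSearchExample {
    static SearchRequest pitSearch(String pitId) {
        return new SearchRequest() // no index arguments on purpose: the PIT pins them
            .source(
                new SearchSourceBuilder()
                    .pointInTimeBuilder(new PointInTimeBuilder(pitId).setKeepAlive(TimeValue.timeValueMinutes(5)))
                    .sort("timestamp")
            );
    }
}
```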
SearchRequestBuilder request = prepareSearch().slice(new SliceBuilder(sliceField, id, numSlice)) .setPointInTime(new PointInTimeBuilder(pointInTimeId)) .addSort(SortBuilders.fieldSort(sortField)) .setSize(randomIntBetween(10, 100)); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryThrottlingStatsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryThrottlingStatsIT.java index 0f0858982b4ad..d8bc9327a2edd 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryThrottlingStatsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/RepositoryThrottlingStatsIT.java @@ -37,7 +37,7 @@ public void testRepositoryThrottlingStats() throws Exception { IndexStats indexStats = indicesStats.getIndex("test-idx"); long totalSizeInBytes = 0; for (ShardStats shard : indexStats.getShards()) { - totalSizeInBytes += shard.getStats().getStore().getSizeInBytes(); + totalSizeInBytes += shard.getStats().getStore().sizeInBytes(); } logger.info("--> total shards size: {} bytes", totalSizeInBytes); diff --git a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java index 7eaa49b27007d..fa49dc26f2259 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/snapshots/SnapshotStressTestsIT.java @@ -8,6 +8,7 @@ package org.elasticsearch.snapshots; +import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.tests.util.LuceneTestCase; @@ -16,7 +17,6 @@ import org.elasticsearch.action.ShardOperationFailedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequestBuilder; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder; @@ -34,6 +34,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; @@ -46,6 +47,7 @@ import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.repositories.RepositoryCleanupResult; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.test.InternalTestCluster; @@ -371,16 +373,11 @@ private void acquirePermitsAtEnd( "--> current cluster state:\n{}", Strings.toString(clusterAdmin().prepareState().get().getState(), true, true) ); - logger.info( - "--> hot threads:\n{}", - clusterAdmin().prepareNodesHotThreads() - .setThreads(99999) - .setIgnoreIdleThreads(false) - .get() - .getNodes() - .stream() - .map(NodeHotThreads::getHotThreads) - .collect(Collectors.joining("\n")) + HotThreads.logLocalHotThreads( + logger, + 
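On the getSizeInBytes()/getReservedSize().getBytes() to sizeInBytes()/reservedSizeInBytes() renames in this and the previous hunks: the new accessors appear to expose the primitive long directly (an assumption on my part, not stated in the diff), which would let call sites sum shard sizes without materializing a ByteSizeValue each time. A sketch of that accessor pairing:

```java
import org.elasticsearch.common.unit.ByteSizeValue;

// Hypothetical stats holder illustrating the assumed design: a primitive
// accessor for arithmetic on hot paths, a rich type for display.
class StoreSize {
    private final long sizeInBytes;

    StoreSize(long sizeInBytes) {
        this.sizeInBytes = sizeInBytes;
    }

    long sizeInBytes() {          // cheap, allocation-free accessor
        return sizeInBytes;
    }

    ByteSizeValue size() {        // human-readable form when needed
        return ByteSizeValue.ofBytes(sizeInBytes);
    }
}
```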
Level.INFO, + "hot threads while failing to acquire permit [" + label + "]", + ReferenceDocs.LOGGING ); failedPermitAcquisitions.add(label); } diff --git a/server/src/main/java/module-info.java b/server/src/main/java/module-info.java index 613e6868b8e9f..e72cb6c53e8e5 100644 --- a/server/src/main/java/module-info.java +++ b/server/src/main/java/module-info.java @@ -419,7 +419,8 @@ provides org.apache.lucene.codecs.PostingsFormat with org.elasticsearch.index.codec.bloomfilter.ES85BloomFilterPostingsFormat, - org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; + org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat, + org.elasticsearch.index.codec.postings.ES812PostingsFormat; provides org.apache.lucene.codecs.DocValuesFormat with ES87TSDBDocValuesFormat; exports org.elasticsearch.cluster.routing.allocation.shards diff --git a/server/src/main/java/org/elasticsearch/Build.java b/server/src/main/java/org/elasticsearch/Build.java index 24cd82d29614e..0b8cd149744e3 100644 --- a/server/src/main/java/org/elasticsearch/Build.java +++ b/server/src/main/java/org/elasticsearch/Build.java @@ -47,8 +47,9 @@ private static class CurrentHolder { // finds the pluggable current build, or uses the local build as a fallback private static Build findCurrent() { - var buildExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(BuildExtension.class), () -> Build::findLocalBuild); - return buildExtension.getCurrentBuild(); + return ExtensionLoader.loadSingleton(ServiceLoader.load(BuildExtension.class)) + .map(BuildExtension::getCurrentBuild) + .orElseGet(Build::findLocalBuild); } } @@ -204,7 +205,7 @@ static URL getElasticsearchCodeSourceLocation() { public static Build readBuild(StreamInput in) throws IOException { final String flavor; if (in.getTransportVersion().before(TransportVersions.V_8_3_0) - || in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + || in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { flavor = in.readString(); } else { flavor = "default"; @@ -234,7 +235,7 @@ public static Build readBuild(StreamInput in) throws IOException { version = versionMatcher.group(1); qualifier = versionMatcher.group(2); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_041)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { minWireVersion = in.readString(); minIndexVersion = in.readString(); displayString = in.readString(); @@ -251,7 +252,7 @@ public static Build readBuild(StreamInput in) throws IOException { public static void writeBuild(Build build, StreamOutput out) throws IOException { if (out.getTransportVersion().before(TransportVersions.V_8_3_0) - || out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + || out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeString(build.flavor()); } out.writeString(build.type().displayName()); @@ -265,7 +266,7 @@ public static void writeBuild(Build build, StreamOutput out) throws IOException out.writeBoolean(build.isSnapshot()); out.writeString(build.qualifiedVersion()); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_041)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeString(build.minWireCompatVersion()); out.writeString(build.minIndexCompatVersion()); out.writeString(build.displayString()); diff --git a/server/src/main/java/org/elasticsearch/TransportVersion.java b/server/src/main/java/org/elasticsearch/TransportVersion.java index 
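The Build.findCurrent rewrite below reflects a changed ExtensionLoader contract: loadSingleton now returns an Optional rather than taking a fallback supplier, so callers compose defaults with map/orElseGet, as in .map(BuildExtension::getCurrentBuild).orElseGet(Build::findLocalBuild). A simplified stand-in for such a loader (the real org.elasticsearch.plugins.ExtensionLoader may differ in detail):

```java
import java.util.Optional;
import java.util.ServiceLoader;

public final class SingletonLoader {
    // Returns the single service implementation, empty if none is installed,
    // and fails loudly if more than one is on the classpath.
    public static <T> Optional<T> loadSingleton(ServiceLoader<T> loader) {
        var it = loader.iterator();
        if (it.hasNext() == false) {
            return Optional.empty();
        }
        T extension = it.next();
        if (it.hasNext()) {
            throw new IllegalStateException("more than one extension found for " + extension.getClass());
        }
        return Optional.of(extension);
    }
}
```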
92bb88f16385d..d3224bb048393 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersion.java +++ b/server/src/main/java/org/elasticsearch/TransportVersion.java @@ -109,13 +109,11 @@ public String toString() { private static class CurrentHolder { private static final TransportVersion CURRENT = findCurrent(); - // finds the pluggable current version, or uses the given fallback + // finds the pluggable current version private static TransportVersion findCurrent() { - var versionExtension = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class), () -> null); - if (versionExtension == null) { - return TransportVersions.LATEST_DEFINED; - } - var version = versionExtension.getCurrentTransportVersion(TransportVersions.LATEST_DEFINED); + var version = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) + .map(e -> e.getCurrentTransportVersion(TransportVersions.LATEST_DEFINED)) + .orElse(TransportVersions.LATEST_DEFINED); assert version.onOrAfter(TransportVersions.LATEST_DEFINED); return version; } diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index ad29384b16f45..1e058f5a01392 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -93,27 +93,6 @@ static TransportVersion def(int id) { * Detached transport versions added below here. */ public static final TransportVersion V_8_500_020 = def(8_500_020); - public static final TransportVersion V_8_500_040 = def(8_500_040); - public static final TransportVersion V_8_500_041 = def(8_500_041); - public static final TransportVersion V_8_500_042 = def(8_500_042); - public static final TransportVersion V_8_500_043 = def(8_500_043); - public static final TransportVersion V_8_500_044 = def(8_500_044); - public static final TransportVersion V_8_500_045 = def(8_500_045); - public static final TransportVersion V_8_500_046 = def(8_500_046); - public static final TransportVersion V_8_500_047 = def(8_500_047); - public static final TransportVersion V_8_500_048 = def(8_500_048); - public static final TransportVersion V_8_500_049 = def(8_500_049); - public static final TransportVersion V_8_500_050 = def(8_500_050); - public static final TransportVersion V_8_500_051 = def(8_500_051); - public static final TransportVersion V_8_500_052 = def(8_500_052); - public static final TransportVersion V_8_500_053 = def(8_500_053); - public static final TransportVersion V_8_500_054 = def(8_500_054); - public static final TransportVersion V_8_500_055 = def(8_500_055); - public static final TransportVersion V_8_500_056 = def(8_500_056); - public static final TransportVersion V_8_500_057 = def(8_500_057); - public static final TransportVersion V_8_500_058 = def(8_500_058); - public static final TransportVersion V_8_500_059 = def(8_500_059); - public static final TransportVersion V_8_500_060 = def(8_500_060); public static final TransportVersion V_8_500_061 = def(8_500_061); public static final TransportVersion V_8_500_062 = def(8_500_062); public static final TransportVersion V_8_500_063 = def(8_500_063); @@ -192,6 +171,9 @@ static TransportVersion def(int id) { public static final TransportVersion INFERENCE_SERVICE_EMBEDDING_SIZE_ADDED = def(8_559_00_0); public static final TransportVersion ENRICH_ELASTICSEARCH_VERSION_REMOVED = def(8_560_00_0); public static final TransportVersion NODE_STATS_REQUEST_SIMPLIFIED = def(8_561_00_0); + public static final 
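The deleted V_8_500_040 through V_8_500_060 constants below were presumably never shipped in a release (which is what makes the collapse safe), so every wire check that referenced them is bumped to V_8_500_061, as the Build and later hunks do. For reference, the gating idiom those checks implement; the field and its default here are hypothetical:

```java
import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;

import java.io.IOException;

class GatedField {
    private static final TransportVersion FIELD_ADDED = TransportVersions.V_8_500_061;

    // Readers substitute a default when the peer predates the field.
    static String read(StreamInput in) throws IOException {
        if (in.getTransportVersion().onOrAfter(FIELD_ADDED)) {
            return in.readString();
        }
        return "default";
    }

    // Writers send the field only to peers that can read it.
    static void write(StreamOutput out, String value) throws IOException {
        if (out.getTransportVersion().onOrAfter(FIELD_ADDED)) {
            out.writeString(value);
        }
    }
}
```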
TransportVersion TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED = def(8_562_00_0); + public static final TransportVersion ESQL_ASYNC_QUERY = def(8_563_00_0); + public static final TransportVersion ESQL_STATUS_INCLUDE_LUCENE_QUERIES = def(8_564_00_0); /* * STOP! READ THIS FIRST! No, really, diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 3d6995bd9e90f..4181b077cb185 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -116,6 +116,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_7_17_14 = new Version(7_17_14_99); public static final Version V_7_17_15 = new Version(7_17_15_99); public static final Version V_7_17_16 = new Version(7_17_16_99); + public static final Version V_7_17_17 = new Version(7_17_17_99); public static final Version V_8_0_0 = new Version(8_00_00_99); public static final Version V_8_0_1 = new Version(8_00_01_99); public static final Version V_8_1_0 = new Version(8_01_00_99); @@ -158,6 +159,7 @@ public class Version implements VersionId, ToXContentFragment { public static final Version V_8_11_1 = new Version(8_11_01_99); public static final Version V_8_11_2 = new Version(8_11_02_99); public static final Version V_8_11_3 = new Version(8_11_03_99); + public static final Version V_8_11_4 = new Version(8_11_04_99); public static final Version V_8_12_0 = new Version(8_12_00_99); public static final Version V_8_13_0 = new Version(8_13_00_99); public static final Version CURRENT = V_8_13_0; diff --git a/server/src/main/java/org/elasticsearch/action/ActionFuture.java b/server/src/main/java/org/elasticsearch/action/ActionFuture.java index e51e31f4c03ce..061875e42fec8 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/ActionFuture.java @@ -27,22 +27,6 @@ public interface ActionFuture extends Future { */ T actionGet(); - /** - * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing - * an {@link IllegalStateException} instead. Also catches - * {@link java.util.concurrent.ExecutionException} and throws the actual cause instead. - */ - T actionGet(String timeout); - - /** - * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing - * an {@link IllegalStateException} instead. Also catches - * {@link java.util.concurrent.ExecutionException} and throws the actual cause instead. - * - * @param timeoutMillis Timeout in millis - */ - T actionGet(long timeoutMillis); - /** * Similar to {@link #get(long, java.util.concurrent.TimeUnit)}, just catching the {@link InterruptedException} and throwing * an {@link IllegalStateException} instead. Also catches diff --git a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java index 6209e9fce390e..32d65d743e6a6 100644 --- a/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java +++ b/server/src/main/java/org/elasticsearch/action/ActionRequestBuilder.java @@ -48,13 +48,6 @@ public Response get(TimeValue timeout) { return execute().actionGet(timeout); } - /** - * Short version of execute().actionGet(). 
- */ - public Response get(String timeout) { - return execute().actionGet(timeout); - } - public void execute(ActionListener listener) { client.execute(action, request, listener); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java deleted file mode 100644 index 6593b90fb7f65..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/hotthreads/NodesHotThreadsRequestBuilder.java +++ /dev/null @@ -1,49 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.node.hotthreads; - -import org.elasticsearch.action.support.nodes.NodesOperationRequestBuilder; -import org.elasticsearch.client.internal.ElasticsearchClient; -import org.elasticsearch.core.TimeValue; -import org.elasticsearch.monitor.jvm.HotThreads; - -public class NodesHotThreadsRequestBuilder extends NodesOperationRequestBuilder< - NodesHotThreadsRequest, - NodesHotThreadsResponse, - NodesHotThreadsRequestBuilder> { - - public NodesHotThreadsRequestBuilder(ElasticsearchClient client) { - super(client, TransportNodesHotThreadsAction.TYPE, new NodesHotThreadsRequest()); - } - - public NodesHotThreadsRequestBuilder setThreads(int threads) { - request.threads(threads); - return this; - } - - public NodesHotThreadsRequestBuilder setIgnoreIdleThreads(boolean ignoreIdleThreads) { - request.ignoreIdleThreads(ignoreIdleThreads); - return this; - } - - public NodesHotThreadsRequestBuilder setType(HotThreads.ReportType type) { - request.type(type); - return this; - } - - public NodesHotThreadsRequestBuilder setSortOrder(HotThreads.SortOrder sortOrder) { - request.sortOrder(sortOrder); - return this; - } - - public NodesHotThreadsRequestBuilder setInterval(TimeValue interval) { - request.interval(interval); - return this; - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java index 7fa97f1ee14b7..ed63e6d1b4474 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/node/reload/TransportNodesReloadSecureSettingsAction.java @@ -122,6 +122,8 @@ protected NodesReloadSecureSettingsResponse.NodeResponse nodeOperation( keystore.decrypt(request.hasPassword() ? 
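The reload-secure-settings change below adds a fail-fast step: the freshly decrypted keystore is merged into the node settings and validated before any ReloadablePlugin sees it, so a bad secure setting is rejected up front instead of half-applied. A sketch, assuming ClusterSettings#validate(Settings, boolean) where the flag also validates dependent settings:

```java
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.SecureSettings;
import org.elasticsearch.common.settings.Settings;

class ReloadValidation {
    static Settings validated(Settings nodeSettings, SecureSettings keystore, ClusterSettings clusterSettings) {
        Settings withKeystore = Settings.builder()
            .put(nodeSettings, false)          // false: do not carry over the old secure settings
            .setSecureSettings(keystore)       // attach the freshly decrypted keystore
            .build();
        clusterSettings.validate(withKeystore, true); // throws before any plugin is reloaded
        return withKeystore;
    }
}
```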
request.getSecureSettingsPassword().getChars() : new char[0]); // add the keystore to the original node settings object final Settings settingsWithKeystore = Settings.builder().put(environment.settings(), false).setSecureSettings(keystore).build(); + clusterService.getClusterSettings().validate(settingsWithKeystore, true); + final List exceptions = new ArrayList<>(); // broadcast the new settings object (with the open embedded keystore) to all reloadable plugins pluginsService.filterPlugins(ReloadablePlugin.class).forEach(p -> { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java index f4e301e0748bb..29bffa3949258 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/state/TransportClusterStateAction.java @@ -11,6 +11,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.master.TransportMasterNodeReadAction; import org.elasticsearch.cluster.ClusterState; @@ -112,7 +113,7 @@ public void onNewClusterState(ClusterState newState) { } if (acceptableClusterStatePredicate.test(newState)) { - ActionListener.completeWith(listener, () -> buildResponse(request, newState)); + executor.execute(ActionRunnable.supply(listener, () -> buildResponse(request, newState))); } else { listener.onFailure( new NotMasterException( @@ -150,6 +151,8 @@ private static Map> getClusterFeatures(ClusterState clusterS } private ClusterStateResponse buildResponse(final ClusterStateRequest request, final ClusterState currentState) { + ThreadPool.assertCurrentThreadPool(ThreadPool.Names.MANAGEMENT); // too heavy to construct & serialize cluster state without forking + logger.trace("Serving cluster state request using version {}", currentState.version()); ClusterState.Builder builder = ClusterState.builder(currentState.getClusterName()); builder.version(currentState.version()); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java index f8d894e4de48b..81a26999d2907 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/stats/AnalysisStats.java @@ -9,6 +9,7 @@ package org.elasticsearch.action.admin.cluster.stats; import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -36,14 +37,12 @@ import java.util.Set; import java.util.TreeMap; -import static org.elasticsearch.TransportVersions.V_8_500_045; - /** * Statistics about analysis usage. 
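The TransportClusterStateAction change above forks response construction onto an executor instead of completing the listener inline, because building and serializing a large cluster state is too heavy for the calling thread (hence the new assertCurrentThreadPool(MANAGEMENT) assertion). The essential shape, using the ActionRunnable.supply call visible in the diff:

```java
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.ActionRunnable;

import java.util.concurrent.Callable;
import java.util.concurrent.Executor;

class ForkedResponse {
    // Before: ActionListener.completeWith(listener, supplier) ran on the caller's thread.
    // After: the same supplier is wrapped and handed to the management executor.
    static <T> void respond(Executor executor, ActionListener<T> listener, Callable<T> buildResponse) {
        executor.execute(ActionRunnable.supply(listener, buildResponse::call));
    }
}
```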
*/ public final class AnalysisStats implements ToXContentFragment, Writeable { - private static final TransportVersion SYNONYM_SETS_VERSION = V_8_500_045; + private static final TransportVersion SYNONYM_SETS_VERSION = TransportVersions.V_8_500_061; private static final Set SYNONYM_FILTER_TYPES = Set.of("synonym", "synonym_graph"); diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java deleted file mode 100644 index aa3f226d23c9d..0000000000000 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/tasks/PendingClusterTasksRequestBuilder.java +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.action.admin.cluster.tasks; - -import org.elasticsearch.action.support.master.MasterNodeReadOperationRequestBuilder; -import org.elasticsearch.client.internal.ElasticsearchClient; - -public class PendingClusterTasksRequestBuilder extends MasterNodeReadOperationRequestBuilder< - PendingClusterTasksRequest, - PendingClusterTasksResponse, - PendingClusterTasksRequestBuilder> { - - public PendingClusterTasksRequestBuilder(ElasticsearchClient client) { - super(client, TransportPendingClusterTasksAction.TYPE, new PendingClusterTasksRequest()); - } -} diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java index 31c5f57ab5eef..e2894f072011c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/ReloadAnalyzersRequest.java @@ -25,7 +25,7 @@ public class ReloadAnalyzersRequest extends BroadcastRequest statuses = EnumSet.of(ClusterHealthStatus.YELLOW, ClusterHealthStatus.RED); private int maxConcurrentShardRequests = DEFAULT_MAX_CONCURRENT_SHARD_REQUESTS; diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeNumberOfShardsCalculator.java b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeNumberOfShardsCalculator.java index 31807919fd9d9..076841e3efadc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeNumberOfShardsCalculator.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/shrink/ResizeNumberOfShardsCalculator.java @@ -69,7 +69,7 @@ public int calculate(Integer numberOfShards, ByteSizeValue maxPrimaryShardSize, } } else if (maxPrimaryShardSize != null) { int sourceIndexShardsNum = sourceMetadata.getNumberOfShards(); - long sourceIndexStorageBytes = indexStoreStats.getSizeInBytes(); + long sourceIndexStorageBytes = indexStoreStats.sizeInBytes(); long maxPrimaryShardSizeBytes = maxPrimaryShardSize.getBytes(); long minShardsNum = sourceIndexStorageBytes / maxPrimaryShardSizeBytes; if (minShardsNum * maxPrimaryShardSizeBytes < sourceIndexStorageBytes) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java 
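With PendingClusterTasksRequestBuilder deleted below, tests reach the action through the getClusterPendingTasks helper seen in the earlier test hunks. A stand-in showing the essential call (the helper's real home in the test framework is not shown in this diff):

```java
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction;
import org.elasticsearch.client.internal.ElasticsearchClient;

class PendingTasksHelper {
    // Executes the action directly, replacing client.admin().cluster().preparePendingClusterTasks().get().
    static PendingClusterTasksResponse getClusterPendingTasks(ElasticsearchClient client) {
        return client.execute(TransportPendingClusterTasksAction.TYPE, new PendingClusterTasksRequest()).actionGet();
    }
}
```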
b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java index 76500964be750..d0da715b17168 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/CommonStats.java @@ -48,7 +48,7 @@ public class CommonStats implements Writeable, ToXContentFragment { private static final TransportVersion VERSION_SUPPORTING_NODE_MAPPINGS = TransportVersions.V_8_5_0; - private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_500_058; + private static final TransportVersion VERSION_SUPPORTING_DENSE_VECTOR_STATS = TransportVersions.V_8_500_061; @Nullable public DocsStats docs; diff --git a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java index a2f4d6408a3a4..61c979f9494b5 100644 --- a/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java +++ b/server/src/main/java/org/elasticsearch/action/downsample/DownsampleAction.java @@ -62,7 +62,7 @@ public Request(StreamInput in) throws IOException { super(in); sourceIndex = in.readString(); targetIndex = in.readString(); - waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_054) + waitTimeout = in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) ? TimeValue.parseTimeValue(in.readString(), "timeout") : DEFAULT_WAIT_TIMEOUT; downsampleConfig = new DownsampleConfig(in); @@ -89,7 +89,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(sourceIndex); out.writeString(targetIndex); out.writeString( - out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_054) + out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) ? 
waitTimeout.getStringRep() : DEFAULT_WAIT_TIMEOUT.getStringRep() ); diff --git a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java index 8b5e077fd85b8..12f7c21cba8e1 100644 --- a/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java +++ b/server/src/main/java/org/elasticsearch/action/index/IndexRequest.java @@ -29,12 +29,14 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.StringLiteralDeduplicator; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.index.Index; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.ingest.IngestService; import org.elasticsearch.plugins.internal.DocumentParsingObserver; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -71,7 +73,7 @@ public class IndexRequest extends ReplicatedWriteRequest implements DocWriteRequest, CompositeIndicesRequest { private static final long SHALLOW_SIZE = RamUsageEstimator.shallowSizeOfInstance(IndexRequest.class); - private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_500_049; + private static final TransportVersion PIPELINES_HAVE_RUN_FIELD_ADDED = TransportVersions.V_8_500_061; /** * Max length of the source document to include into string() @@ -153,11 +155,9 @@ public IndexRequest(@Nullable ShardId shardId, StreamInput in) throws IOExceptio opType = OpType.fromId(in.readByte()); version = in.readLong(); versionType = VersionType.fromValue(in.readByte()); - pipeline = in.readOptionalString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { - finalPipeline = in.readOptionalString(); - } + pipeline = readPipelineName(in); if (in.getTransportVersion().onOrAfter(TransportVersions.V_7_5_0)) { + finalPipeline = readPipelineName(in); isPipelineResolved = in.readBoolean(); } isRetry = in.readBoolean(); @@ -204,6 +204,22 @@ public IndexRequest(String index) { this.index = index; } + private static final StringLiteralDeduplicator pipelineNameDeduplicator = new StringLiteralDeduplicator(); + + // reads pipeline name from the stream and deduplicates it to save heap on large bulk requests + @Nullable + private static String readPipelineName(StreamInput in) throws IOException { + final String read = in.readOptionalString(); + if (read == null) { + return null; + } + if (IngestService.NOOP_PIPELINE_NAME.equals(read)) { + // common path of no pipeline set + return IngestService.NOOP_PIPELINE_NAME; + } + return pipelineNameDeduplicator.deduplicate(read); + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = super.validate(); diff --git a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java index d821764e788b7..c77a03824a75c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/AbstractSearchAsyncAction.java @@ -29,12 +29,12 @@ import org.elasticsearch.core.Releasables; import 
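The readPipelineName addition below is a heap optimization: a large bulk request repeats the same few pipeline names thousands of times, and deduplication lets every IndexRequest share one canonical String per name. A simplified, map-based stand-in for StringLiteralDeduplicator (the real class bounds its memory differently; this is illustrative only):

```java
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

final class SimpleDeduplicator {
    private final Map<String, String> canonical = new ConcurrentHashMap<>();

    // Returns a canonical instance equal to s, so duplicates can be GC'd.
    String deduplicate(String s) {
        String existing = canonical.putIfAbsent(s, s);
        return existing == null ? s : existing;
    }
}
```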
org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchContextMissingException; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; @@ -212,7 +212,9 @@ public final void start() { // total hits is null in the response if the tracking of total hits is disabled boolean withTotalHits = trackTotalHitsUpTo != SearchContext.TRACK_TOTAL_HITS_DISABLED; sendSearchResponse( - withTotalHits ? InternalSearchResponse.EMPTY_WITH_TOTAL_HITS : InternalSearchResponse.EMPTY_WITHOUT_TOTAL_HITS, + withTotalHits + ? new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1) + : new SearchResponseSections(SearchHits.EMPTY_WITHOUT_TOTAL_HITS, null, null, false, null, null, 1), new AtomicArray<>(0) ); return; @@ -655,7 +657,7 @@ public boolean isPartOfPointInTime(ShardSearchContextId contextId) { } private SearchResponse buildSearchResponse( - InternalSearchResponse internalSearchResponse, + SearchResponseSections internalSearchResponse, ShardSearchFailure[] failures, String scrollId, String searchContextId @@ -682,7 +684,7 @@ boolean buildPointInTimeFromSearchResults() { } @Override - public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults) { + public void sendSearchResponse(SearchResponseSections internalSearchResponse, AtomicArray queryResults) { ShardSearchFailure[] failures = buildShardFailures(); Boolean allowPartialResults = request.allowPartialSearchResults(); assert allowPartialResults != null : "SearchRequest missing setting for allowPartialSearchResults"; diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index 2df8b60cd9728..00e2b41fde3da 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.collapse.CollapseBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; import java.util.Iterator; import java.util.List; @@ -31,13 +30,13 @@ */ final class ExpandSearchPhase extends SearchPhase { private final SearchPhaseContext context; - private final InternalSearchResponse searchResponse; + private final SearchHits searchHits; private final Supplier nextPhase; - ExpandSearchPhase(SearchPhaseContext context, InternalSearchResponse searchResponse, Supplier nextPhase) { + ExpandSearchPhase(SearchPhaseContext context, SearchHits searchHits, Supplier nextPhase) { super("expand"); this.context = context; - this.searchResponse = searchResponse; + this.searchHits = searchHits; this.nextPhase = nextPhase; } @@ -53,7 +52,7 @@ private boolean isCollapseRequest() { @Override public void run() { - if (isCollapseRequest() && searchResponse.hits().getHits().length > 0) { + if (isCollapseRequest() && 
searchHits.getHits().length > 0) { SearchRequest searchRequest = context.getRequest(); CollapseBuilder collapseBuilder = searchRequest.source().collapse(); final List innerHitBuilders = collapseBuilder.getInnerHits(); @@ -61,7 +60,7 @@ public void run() { if (collapseBuilder.getMaxConcurrentGroupRequests() > 0) { multiRequest.maxConcurrentSearchRequests(collapseBuilder.getMaxConcurrentGroupRequests()); } - for (SearchHit hit : searchResponse.hits().getHits()) { + for (SearchHit hit : searchHits.getHits()) { BoolQueryBuilder groupQuery = new BoolQueryBuilder(); Object collapseValue = hit.field(collapseBuilder.getField()).getValue(); if (collapseValue != null) { @@ -85,7 +84,7 @@ public void run() { } context.getSearchTransport().sendExecuteMultiSearch(multiRequest, context.getTask(), ActionListener.wrap(response -> { Iterator it = response.iterator(); - for (SearchHit hit : searchResponse.hits.getHits()) { + for (SearchHit hit : searchHits.getHits()) { for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { MultiSearchResponse.Item item = it.next(); if (item.isFailure()) { diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java index 9f1da9a7e2b03..9c50d534ac4ce 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchLookupFieldsPhase.java @@ -15,9 +15,9 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.AtomicArray; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.fetch.subphase.LookupField; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.transport.RemoteClusterService; import java.util.ArrayList; @@ -33,10 +33,10 @@ */ final class FetchLookupFieldsPhase extends SearchPhase { private final SearchPhaseContext context; - private final InternalSearchResponse searchResponse; + private final SearchResponseSections searchResponse; private final AtomicArray queryResults; - FetchLookupFieldsPhase(SearchPhaseContext context, InternalSearchResponse searchResponse, AtomicArray queryResults) { + FetchLookupFieldsPhase(SearchPhaseContext context, SearchResponseSections searchResponse, AtomicArray queryResults) { super("fetch_lookup_fields"); this.context = context; this.searchResponse = searchResponse; @@ -47,9 +47,9 @@ private record Cluster(String clusterAlias, List hitsWithLookupFields } - private static List groupLookupFieldsByClusterAlias(InternalSearchResponse response) { + private static List groupLookupFieldsByClusterAlias(SearchHits searchHits) { final Map> perClusters = new HashMap<>(); - for (SearchHit hit : response.hits.getHits()) { + for (SearchHit hit : searchHits.getHits()) { String clusterAlias = hit.getClusterAlias() != null ? 
hit.getClusterAlias() : RemoteClusterService.LOCAL_CLUSTER_GROUP_KEY; if (hit.hasLookupFields()) { perClusters.computeIfAbsent(clusterAlias, k -> new ArrayList<>()).add(hit); @@ -70,7 +70,7 @@ private static List groupLookupFieldsByClusterAlias(InternalSearchRespo @Override public void run() { - final List clusters = groupLookupFieldsByClusterAlias(searchResponse); + final List clusters = groupLookupFieldsByClusterAlias(searchResponse.hits); if (clusters.isEmpty()) { context.sendSearchResponse(searchResponse, queryResults); return; diff --git a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java index e8d3ded154f55..27ff6a2ab8309 100644 --- a/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/FetchSearchPhase.java @@ -16,7 +16,6 @@ import org.elasticsearch.search.dfs.AggregatedDfs; import org.elasticsearch.search.fetch.FetchSearchResult; import org.elasticsearch.search.fetch.ShardFetchSearchRequest; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.transport.Transport; @@ -31,7 +30,7 @@ final class FetchSearchPhase extends SearchPhase { private final ArraySearchPhaseResults fetchResults; private final AtomicArray queryResults; - private final BiFunction, SearchPhase> nextPhaseFactory; + private final BiFunction, SearchPhase> nextPhaseFactory; private final SearchPhaseContext context; private final Logger logger; private final SearchPhaseResults resultConsumer; @@ -45,7 +44,7 @@ final class FetchSearchPhase extends SearchPhase { context, (response, queryPhaseResults) -> new ExpandSearchPhase( context, - response, + response.hits, () -> new FetchLookupFieldsPhase(context, response, queryPhaseResults) ) ); @@ -55,7 +54,7 @@ final class FetchSearchPhase extends SearchPhase { SearchPhaseResults resultConsumer, AggregatedDfs aggregatedDfs, SearchPhaseContext context, - BiFunction, SearchPhase> nextPhaseFactory + BiFunction, SearchPhase> nextPhaseFactory ) { super("fetch"); if (context.getNumShards() != resultConsumer.getNumShards()) { @@ -230,11 +229,12 @@ private void moveToNextPhase( SearchPhaseController.ReducedQueryPhase reducedQueryPhase, AtomicArray fetchResultsArr ) { - final InternalSearchResponse internalResponse = SearchPhaseController.merge( - context.getRequest().scroll() != null, - reducedQueryPhase, - fetchResultsArr + context.executeNextPhase( + this, + nextPhaseFactory.apply( + SearchPhaseController.merge(context.getRequest().scroll() != null, reducedQueryPhase, fetchResultsArr), + queryResults + ) ); - context.executeNextPhase(this, nextPhaseFactory.apply(internalResponse, queryResults)); } } diff --git a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java index cadcd6ca57334..2e50667fc02b1 100644 --- a/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/MultiSearchRequest.java @@ -19,6 +19,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.xcontent.XContentHelper; import 
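The MultiSearchRequest hunks below replace the slice-to-stream-to-parser dance with XContentHelper.createParserNotCompressed, again inside try-with-resources. Sketch of the resulting header parsing, using the argument shapes visible in the diff:

```java
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.XContentType;

import java.io.IOException;
import java.util.Map;

class MsearchBodyParsing {
    // Builds the parser straight from the known-uncompressed bytes slice;
    // try-with-resources guarantees the parser is closed.
    static Map<String, Object> parseHeader(XContentParserConfiguration config, BytesReference slice, XContentType type)
        throws IOException {
        try (XContentParser parser = XContentHelper.createParserNotCompressed(config, slice, type)) {
            return parser.map();
        }
    }
}
```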
org.elasticsearch.core.RestApiVersion; import org.elasticsearch.rest.action.search.RestMultiSearchAction; import org.elasticsearch.rest.action.search.RestSearchAction; @@ -33,7 +34,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.List; import java.util.Locale; @@ -239,8 +239,11 @@ public static void readMultiLineFormat( // now parse the action if (nextMarker - from > 0) { try ( - InputStream stream = data.slice(from, nextMarker - from).streamInput(); - XContentParser parser = xContent.createParser(parserConfig, stream) + XContentParser parser = XContentHelper.createParserNotCompressed( + parserConfig, + data.slice(from, nextMarker - from), + xContent.type() + ) ) { Map source = parser.map(); Object expandWildcards = null; @@ -301,8 +304,13 @@ public static void readMultiLineFormat( if (nextMarker == -1) { break; } - BytesReference bytes = data.slice(from, nextMarker - from); - try (InputStream stream = bytes.streamInput(); XContentParser parser = xContent.createParser(parserConfig, stream)) { + try ( + XContentParser parser = XContentHelper.createParserNotCompressed( + parserConfig, + data.slice(from, nextMarker - from), + xContent.type() + ) + ) { consumer.accept(searchRequest, parser); } // move pointers diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java index f10650a6401d6..83a6870d72491 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchContextId.java @@ -32,10 +32,10 @@ import java.nio.charset.StandardCharsets; import java.util.Base64; import java.util.Collections; -import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.TreeSet; import java.util.stream.Collectors; public final class SearchContextId { @@ -110,12 +110,30 @@ public static SearchContextId decode(NamedWriteableRegistry namedWriteableRegist } } + public static String[] decodeIndices(String id) { + try ( + var decodedInputStream = Base64.getUrlDecoder().wrap(new ByteArrayInputStream(id.getBytes(StandardCharsets.ISO_8859_1))); + var in = new InputStreamStreamInput(decodedInputStream) + ) { + final TransportVersion version = TransportVersion.readVersion(in); + in.setTransportVersion(version); + final Map shards = Collections.unmodifiableMap( + in.readCollection(Maps::newHashMapWithExpectedSize, SearchContextId::readShardsMapEntry) + ); + return new SearchContextId(shards, Collections.emptyMap()).getActualIndices(); + } catch (IOException e) { + assert false : e; + throw new IllegalArgumentException(e); + } + } + private static void readShardsMapEntry(StreamInput in, Map shards) throws IOException { shards.put(new ShardId(in), new SearchContextIdForNode(in)); } public String[] getActualIndices() { - final Set indices = new HashSet<>(); + // ensure that the order is consistent + final Set indices = new TreeSet<>(); for (Map.Entry entry : shards().entrySet()) { final String indexName = entry.getKey().getIndexName(); final String clusterAlias = entry.getValue().getClusterAlias(); diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java index d70b99fe46c00..af9bcac8e3a33 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java +++ 
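Two things to note in the SearchContextId hunk: the new decodeIndices reads just enough of the encoded id to recover the index names, and getActualIndices now collects into a TreeSet so the returned order is deterministic rather than hash-dependent. The HashSet-versus-TreeSet difference in one runnable snippet:

```java
import java.util.Set;
import java.util.TreeSet;

class OrderingDemo {
    public static void main(String[] args) {
        Set<String> sorted = new TreeSet<>();
        sorted.add("remote:logs-2");
        sorted.add("logs-1");
        sorted.add("logs-0");
        // Always [logs-0, logs-1, remote:logs-2], independent of insertion or hash order.
        System.out.println(sorted);
    }
}
```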
b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseContext.java @@ -14,7 +14,6 @@ import org.elasticsearch.core.Releasable; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.transport.Transport; @@ -64,7 +63,7 @@ interface SearchPhaseContext extends Executor { * @param internalSearchResponse the internal search response * @param queryResults the results of the query phase */ - void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray queryResults); + void sendSearchResponse(SearchResponseSections internalSearchResponse, AtomicArray queryResults); /** * Notifies the top-level listener of the provided exception diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java index d4808def29d1f..e425d9d66dd69 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchPhaseController.java @@ -40,7 +40,6 @@ import org.elasticsearch.search.dfs.DfsKnnResults; import org.elasticsearch.search.dfs.DfsSearchResult; import org.elasticsearch.search.fetch.FetchSearchResult; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.SearchProfileQueryPhaseResult; import org.elasticsearch.search.profile.SearchProfileResults; @@ -355,13 +354,13 @@ public static List[] fillDocIdsToLoad(int numShards, ScoreDoc[] shardDo * Expects sortedDocs to have top search docs across all shards, optionally followed by top suggest docs for each named * completion suggestion ordered by suggestion name */ - public static InternalSearchResponse merge( + public static SearchResponseSections merge( boolean ignoreFrom, ReducedQueryPhase reducedQueryPhase, AtomicArray fetchResultsArray ) { if (reducedQueryPhase.isEmptyResult) { - return InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; + return new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1); } ScoreDoc[] sortedDocs = reducedQueryPhase.sortedTopDocs.scoreDocs; var fetchResults = fetchResultsArray.asList(); @@ -753,14 +752,14 @@ public record ReducedQueryPhase( * Creates a new search response from the given merged hits. 
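From here on the PR threads SearchResponseSections (formerly the parent of InternalSearchResponse) through the search phases. The constructor argument order used throughout is (hits, aggregations, suggest, timedOut, terminatedEarly, profileResults, numReducePhases); for example, the old EMPTY_WITH_TOTAL_HITS constant becomes an inline construction:

```java
import org.elasticsearch.action.search.SearchResponseSections;
import org.elasticsearch.search.SearchHits;

class EmptySections {
    // Replacement for InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, per the
    // calls visible in SearchPhaseController.merge in this diff.
    static SearchResponseSections emptyWithTotalHits() {
        return new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1);
    }
}
```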
* @see #merge(boolean, ReducedQueryPhase, AtomicArray) */ - public InternalSearchResponse buildResponse(SearchHits hits, Collection fetchResults) { - return new InternalSearchResponse( + public SearchResponseSections buildResponse(SearchHits hits, Collection fetchResults) { + return new SearchResponseSections( hits, aggregations, suggest, - buildSearchProfileResults(fetchResults), timedOut, terminatedEarly, + buildSearchProfileResults(fetchResults), numReducePhases ); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index 7ac8c4d5299d4..456a574c6f6b2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -407,6 +407,21 @@ public ActionRequestValidationException validate() { if (scroll) { validationException = addValidationError("using [point in time] is not allowed in a scroll context", validationException); } + if (indices().length > 0) { + validationException = addValidationError( + "[indices] cannot be used with point in time. Do not specify any index with point in time.", + validationException + ); + } + if (indicesOptions().equals(DEFAULT_INDICES_OPTIONS) == false) { + validationException = addValidationError("[indicesOptions] cannot be used with point in time", validationException); + } + if (routing() != null) { + validationException = addValidationError("[routing] cannot be used with point in time", validationException); + } + if (preference() != null) { + validationException = addValidationError("[preference] cannot be used with point in time", validationException); + } } else if (source != null && source.sorts() != null) { for (SortBuilder sortBuilder : source.sorts()) { if (sortBuilder instanceof FieldSortBuilder diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 9ff0f6273171b..d6a0153a235a2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; @@ -67,7 +66,13 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO private static final ParseField TERMINATED_EARLY = new ParseField("terminated_early"); private static final ParseField NUM_REDUCE_PHASES = new ParseField("num_reduce_phases"); - private final SearchResponseSections internalResponse; + private final SearchHits hits; + private final Aggregations aggregations; + private final Suggest suggest; + private final SearchProfileResults profileResults; + private final boolean timedOut; + private final Boolean terminatedEarly; + private final int numReducePhases; private final String scrollId; private final String pointInTimeId; private final int totalShards; @@ -79,7 +84,13 @@ public class SearchResponse extends ActionResponse implements ChunkedToXContentO public SearchResponse(StreamInput in) throws IOException { super(in); - 
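The new point-in-time checks in SearchRequest#validate use the standard null-chaining idiom: each failed rule appends to a lazily created ActionRequestValidationException, and a null result at the end means the request is valid. Reduced to its core, with the messages taken from the diff:

```java
import org.elasticsearch.action.ActionRequestValidationException;

import static org.elasticsearch.action.ValidateActions.addValidationError;

class PitValidation {
    static ActionRequestValidationException validate(String[] indices, String routing, String preference) {
        ActionRequestValidationException validationException = null;
        if (indices.length > 0) {
            validationException = addValidationError(
                "[indices] cannot be used with point in time. Do not specify any index with point in time.",
                validationException
            );
        }
        if (routing != null) {
            validationException = addValidationError("[routing] cannot be used with point in time", validationException);
        }
        if (preference != null) {
            validationException = addValidationError("[preference] cannot be used with point in time", validationException);
        }
        return validationException; // null == valid
    }
}
```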
internalResponse = new InternalSearchResponse(in); + this.hits = new SearchHits(in); + this.aggregations = in.readBoolean() ? InternalAggregations.readFrom(in) : null; + this.suggest = in.readBoolean() ? new Suggest(in) : null; + this.timedOut = in.readBoolean(); + this.terminatedEarly = in.readOptionalBoolean(); + this.profileResults = in.readOptionalWriteable(SearchProfileResults::new); + this.numReducePhases = in.readVInt(); totalShards = in.readVInt(); successfulShards = in.readVInt(); int size = in.readVInt(); @@ -99,7 +110,13 @@ public SearchResponse(StreamInput in) throws IOException { } public SearchResponse( - SearchResponseSections internalResponse, + SearchHits hits, + Aggregations aggregations, + Suggest suggest, + boolean timedOut, + Boolean terminatedEarly, + SearchProfileResults profileResults, + int numReducePhases, String scrollId, int totalShards, int successfulShards, @@ -108,11 +125,63 @@ public SearchResponse( ShardSearchFailure[] shardFailures, Clusters clusters ) { - this(internalResponse, scrollId, totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, clusters, null); + this( + hits, + aggregations, + suggest, + timedOut, + terminatedEarly, + profileResults, + numReducePhases, + scrollId, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardFailures, + clusters, + null + ); + } + + public SearchResponse( + SearchResponseSections searchResponseSections, + String scrollId, + int totalShards, + int successfulShards, + int skippedShards, + long tookInMillis, + ShardSearchFailure[] shardFailures, + Clusters clusters, + String pointInTimeId + ) { + this( + searchResponseSections.hits, + searchResponseSections.aggregations, + searchResponseSections.suggest, + searchResponseSections.timedOut, + searchResponseSections.terminatedEarly, + searchResponseSections.profileResults, + searchResponseSections.numReducePhases, + scrollId, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardFailures, + clusters, + pointInTimeId + ); } public SearchResponse( - SearchResponseSections internalResponse, + SearchHits hits, + Aggregations aggregations, + Suggest suggest, + boolean timedOut, + Boolean terminatedEarly, + SearchProfileResults profileResults, + int numReducePhases, String scrollId, int totalShards, int successfulShards, @@ -122,7 +191,13 @@ public SearchResponse( Clusters clusters, String pointInTimeId ) { - this.internalResponse = internalResponse; + this.hits = hits; + this.aggregations = aggregations; + this.suggest = suggest; + this.profileResults = profileResults; + this.timedOut = timedOut; + this.terminatedEarly = terminatedEarly; + this.numReducePhases = numReducePhases; this.scrollId = scrollId; this.pointInTimeId = pointInTimeId; this.clusters = clusters; @@ -144,7 +219,7 @@ public RestStatus status() { * The search hits. */ public SearchHits getHits() { - return internalResponse.hits(); + return hits; } /** @@ -152,7 +227,7 @@ public SearchHits getHits() { * either {@code null} or {@link InternalAggregations#EMPTY}. */ public @Nullable Aggregations getAggregations() { - return internalResponse.aggregations(); + return aggregations; } /** @@ -163,14 +238,14 @@ public boolean hasAggregations() { } public Suggest getSuggest() { - return internalResponse.suggest(); + return suggest; } /** * Has the search operation timed out. 
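Stepping back to the `SearchRequest.validate()` hunk earlier: once a point-in-time reader is set, the request-level `indices`, `indicesOptions`, `routing`, and `preference` parameters are now rejected. A hedged illustration of the new failure mode; the PIT id is a placeholder and the surrounding class is illustrative, not part of this change:

```java
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;

class PitValidationSketch {
    static void demo(String pitId) {
        SearchRequest request = new SearchRequest();
        request.source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitId)));

        request.indices("my-index");  // "[indices] cannot be used with point in time"
        request.routing("user-1");    // "[routing] cannot be used with point in time"
        request.preference("_local"); // "[preference] cannot be used with point in time"

        // validate() should now return a non-null ActionRequestValidationException
        assert request.validate() != null;
    }
}
```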
*/ public boolean isTimedOut() { - return internalResponse.timedOut(); + return timedOut; } /** @@ -178,14 +253,14 @@ public boolean isTimedOut() { * terminateAfter */ public Boolean isTerminatedEarly() { - return internalResponse.terminatedEarly(); + return terminatedEarly; } /** * Returns the number of reduce phases applied to obtain this search response */ public int getNumReducePhases() { - return internalResponse.getNumReducePhases(); + return numReducePhases; } /** @@ -253,7 +328,10 @@ public String pointInTimeId() { */ @Nullable public Map getProfileResults() { - return internalResponse.profile(); + if (profileResults == null) { + return Collections.emptyMap(); + } + return profileResults.getShardResults(); } /** @@ -278,7 +356,27 @@ public Iterator innerToXContentChunked(ToXContent.Params p return Iterators.concat( ChunkedToXContentHelper.singleChunk(SearchResponse.this::headerToXContent), Iterators.single(clusters), - internalResponse.toXContentChunked(params) + Iterators.concat( + Iterators.flatMap(Iterators.single(hits), r -> r.toXContentChunked(params)), + Iterators.single((ToXContent) (b, p) -> { + if (aggregations != null) { + aggregations.toXContent(b, p); + } + return b; + }), + Iterators.single((b, p) -> { + if (suggest != null) { + suggest.toXContent(b, p); + } + return b; + }), + Iterators.single((b, p) -> { + if (profileResults != null) { + profileResults.toXContent(b, p); + } + return b; + }) + ) ); } @@ -396,17 +494,14 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE } } } - SearchResponseSections searchResponseSections = new SearchResponseSections( + return new SearchResponse( hits, aggs, suggest, timedOut, terminatedEarly, profile, - numReducePhases - ); - return new SearchResponse( - searchResponseSections, + numReducePhases, scrollId, totalShards, successfulShards, @@ -420,7 +515,13 @@ public static SearchResponse innerFromXContent(XContentParser parser) throws IOE @Override public void writeTo(StreamOutput out) throws IOException { - internalResponse.writeTo(out); + hits.writeTo(out); + out.writeOptionalWriteable((InternalAggregations) aggregations); + out.writeOptionalWriteable(suggest); + out.writeBoolean(timedOut); + out.writeOptionalBoolean(terminatedEarly); + out.writeOptionalWriteable(profileResults); + out.writeVInt(numReducePhases); out.writeVInt(totalShards); out.writeVInt(successfulShards); @@ -532,7 +633,7 @@ public Clusters(StreamInput in) throws IOException { this.total = in.readVInt(); int successfulTemp = in.readVInt(); int skippedTemp = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_053)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { List clusterList = in.readCollectionAsList(Cluster::new); if (clusterList.isEmpty()) { this.clusterInfo = Collections.emptyMap(); @@ -585,7 +686,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(total); out.writeVInt(successful); out.writeVInt(skipped); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_053)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { if (clusterInfo != null) { List clusterList = clusterInfo.values().stream().toList(); out.writeCollection(clusterList); @@ -1268,17 +1369,14 @@ public String toString() { // public for tests public static SearchResponse empty(Supplier tookInMillisSupplier, Clusters clusters) { SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), Float.NaN); - 
InternalSearchResponse internalSearchResponse = new InternalSearchResponse( + return new SearchResponse( searchHits, InternalAggregations.EMPTY, null, - null, false, null, - 0 - ); - return new SearchResponse( - internalSearchResponse, + null, + 0, null, 0, 0, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java index b6143cfc51c3a..1b616b9f3bc87 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseMerger.java @@ -27,7 +27,6 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.aggregations.AggregationReduceContext; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; @@ -211,18 +210,15 @@ SearchResponse getMergedResponse(Clusters clusters) { SearchProfileResults profileShardResults = profileResults.isEmpty() ? null : new SearchProfileResults(profileResults); // make failures ordering consistent between ordinary search and CCS by looking at the shard they come from Arrays.sort(shardFailures, FAILURES_COMPARATOR); - InternalSearchResponse response = new InternalSearchResponse( + long tookInMillis = searchTimeProvider.buildTookInMillis(); + return new SearchResponse( mergedSearchHits, reducedAggs, suggest, - profileShardResults, topDocsStats.timedOut, topDocsStats.terminatedEarly, - numReducePhases - ); - long tookInMillis = searchTimeProvider.buildTookInMillis(); - return new SearchResponse( - response, + profileShardResults, + numReducePhases, null, totalShards, successfulShards, diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java index b4de15f4cc413..6f382b9e5f8d6 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponseSections.java @@ -8,30 +8,20 @@ package org.elasticsearch.action.search; -import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.Aggregations; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; import org.elasticsearch.search.suggest.Suggest; -import org.elasticsearch.xcontent.ToXContent; -import java.io.IOException; import java.util.Collections; -import java.util.Iterator; import java.util.Map; /** - * Base class that holds the various sections which a search response is - * composed of (hits, aggs, suggestions etc.) and allows to retrieve them. - * - * The reason why this class exists is that the high level REST client uses its own classes - * to parse aggregations into, which are not serializable. This is the common part that can be - * shared between core and client. + * Holds some sections that a search response is composed of (hits, aggs, suggestions etc.) during some steps of the search response + * building. 
*/ -public class SearchResponseSections implements ChunkedToXContent { +public class SearchResponseSections { protected final SearchHits hits; protected final Aggregations aggregations; @@ -98,33 +88,4 @@ public final Map profile() { } return profileResults.getShardResults(); } - - @Override - public Iterator toXContentChunked(ToXContent.Params params) { - return Iterators.concat( - Iterators.flatMap(Iterators.single(hits), r -> r.toXContentChunked(params)), - Iterators.single((ToXContent) (b, p) -> { - if (aggregations != null) { - aggregations.toXContent(b, p); - } - return b; - }), - Iterators.single((b, p) -> { - if (suggest != null) { - suggest.toXContent(b, p); - } - return b; - }), - Iterators.single((b, p) -> { - if (profileResults != null) { - profileResults.toXContent(b, p); - } - return b; - }) - ); - } - - protected void writeTo(StreamOutput out) throws IOException { - throw new UnsupportedOperationException(); - } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java index 5681bda8b2741..885fd98fbdc15 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchScrollAsyncAction.java @@ -18,7 +18,6 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.InternalScrollSearchRequest; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.transport.RemoteClusterService; import org.elasticsearch.transport.Transport; @@ -240,7 +239,6 @@ protected final void sendResponse( final AtomicArray fetchResults ) { try { - final InternalSearchResponse internalResponse = SearchPhaseController.merge(true, queryPhase, fetchResults); // the scroll ID never changes we always return the same ID. This ID contains all the shards and their context ids // such that we can talk to them again in the next roundtrip. String scrollId = null; @@ -250,7 +248,7 @@ protected final void sendResponse( ActionListener.respondAndRelease( listener, new SearchResponse( - internalResponse, + SearchPhaseController.merge(true, queryPhase, fetchResults), scrollId, this.scrollId.getContext().length, successfulOps.get(), diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java new file mode 100644 index 0000000000000..93b8e22d0d7cd --- /dev/null +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportAPMMetrics.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. 
+ */ + +package org.elasticsearch.action.search; + +import org.elasticsearch.telemetry.metric.LongHistogram; +import org.elasticsearch.telemetry.metric.MeterRegistry; + +public class SearchTransportAPMMetrics { + public static final String SEARCH_ACTION_LATENCY_BASE_METRIC = "es.search.nodes.transport_actions.latency.histogram"; + public static final String ACTION_ATTRIBUTE_NAME = "action"; + + public static final String QUERY_CAN_MATCH_NODE_METRIC = "shards_can_match"; + public static final String DFS_ACTION_METRIC = "dfs_query_then_fetch/shard_dfs_phase"; + public static final String QUERY_ID_ACTION_METRIC = "dfs_query_then_fetch/shard_query_phase"; + public static final String QUERY_ACTION_METRIC = "query_then_fetch/shard_query_phase"; + public static final String FREE_CONTEXT_ACTION_METRIC = "shard_release_context"; + public static final String FETCH_ID_ACTION_METRIC = "shard_fetch_phase"; + public static final String QUERY_SCROLL_ACTION_METRIC = "scroll/shard_query_phase"; + public static final String FETCH_ID_SCROLL_ACTION_METRIC = "scroll/shard_fetch_phase"; + public static final String QUERY_FETCH_SCROLL_ACTION_METRIC = "scroll/shard_query_and_fetch_phase"; + public static final String FREE_CONTEXT_SCROLL_ACTION_METRIC = "scroll/shard_release_context"; + public static final String CLEAR_SCROLL_CONTEXTS_ACTION_METRIC = "scroll/shard_release_contexts"; + + private final LongHistogram actionLatencies; + + public SearchTransportAPMMetrics(MeterRegistry meterRegistry) { + this( + meterRegistry.registerLongHistogram( + SEARCH_ACTION_LATENCY_BASE_METRIC, + "Transport action execution times at the node level, expressed as a histogram", + "millis" + ) + ); + } + + private SearchTransportAPMMetrics(LongHistogram actionLatencies) { + this.actionLatencies = actionLatencies; + } + + public LongHistogram getActionLatencies() { + return actionLatencies; + } +} diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index e46d26c3532ad..b7cc61ad70e2f 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -59,6 +59,19 @@ import java.util.Objects; import java.util.function.BiFunction; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.ACTION_ATTRIBUTE_NAME; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.CLEAR_SCROLL_CONTEXTS_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.DFS_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FETCH_ID_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FETCH_ID_SCROLL_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FREE_CONTEXT_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FREE_CONTEXT_SCROLL_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_CAN_MATCH_NODE_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_FETCH_SCROLL_ACTION_METRIC; +import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_ID_ACTION_METRIC; +import static 
org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_SCROLL_ACTION_METRIC; + /** * An encapsulation of {@link org.elasticsearch.search.SearchService} operations exposed through * transport. @@ -68,13 +81,27 @@ public class SearchTransportService { public static final String FREE_CONTEXT_SCROLL_ACTION_NAME = "indices:data/read/search[free_context/scroll]"; public static final String FREE_CONTEXT_ACTION_NAME = "indices:data/read/search[free_context]"; public static final String CLEAR_SCROLL_CONTEXTS_ACTION_NAME = "indices:data/read/search[clear_scroll_contexts]"; + + /** + * Part of DFS_QUERY_THEN_FETCH, which fetches distributed term frequencies and executes KNN. + */ public static final String DFS_ACTION_NAME = "indices:data/read/search[phase/dfs]"; public static final String QUERY_ACTION_NAME = "indices:data/read/search[phase/query]"; + + /** + * Part of DFS_QUERY_THEN_FETCH, which executes the shard query phase using the merged distributed term frequencies. + */ public static final String QUERY_ID_ACTION_NAME = "indices:data/read/search[phase/query/id]"; public static final String QUERY_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query/scroll]"; public static final String QUERY_FETCH_SCROLL_ACTION_NAME = "indices:data/read/search[phase/query+fetch/scroll]"; public static final String FETCH_ID_SCROLL_ACTION_NAME = "indices:data/read/search[phase/fetch/id/scroll]"; public static final String FETCH_ID_ACTION_NAME = "indices:data/read/search[phase/fetch/id]"; + + /** + * The Can-Match phase. It is executed to pre-filter shards that a search request hits. It rewrites the query on + * the shard and checks whether the result of the rewrite matches no documents, in which case the shard can be + * filtered out. + */ public static final String QUERY_CAN_MATCH_NODE_NAME = "indices:data/read/search[can_match][n]"; private final TransportService transportService; @@ -382,35 +409,41 @@ public void writeTo(StreamOutput out) throws IOException { } } - public static void registerRequestHandler(TransportService transportService, SearchService searchService) { + public static void registerRequestHandler( + TransportService transportService, + SearchService searchService, + SearchTransportAPMMetrics searchTransportMetrics + ) { transportService.registerRequestHandler( FREE_CONTEXT_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ScrollFreeContextRequest::new, - (request, channel, task) -> { + instrumentedHandler(FREE_CONTEXT_SCROLL_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { boolean freed = searchService.freeReaderContext(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); - } + }) ); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_SCROLL_ACTION_NAME, false, SearchFreeContextResponse::new); + transportService.registerRequestHandler( FREE_CONTEXT_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, SearchFreeContextRequest::new, - (request, channel, task) -> { + instrumentedHandler(FREE_CONTEXT_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { boolean freed = searchService.freeReaderContext(request.id()); channel.sendResponse(new SearchFreeContextResponse(freed)); - } + }) ); TransportActionProxy.registerProxyAction(transportService, FREE_CONTEXT_ACTION_NAME, false, SearchFreeContextResponse::new); + transportService.registerRequestHandler( CLEAR_SCROLL_CONTEXTS_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, TransportRequest.Empty::new, - (request, channel, task) -> { +
instrumentedHandler(CLEAR_SCROLL_CONTEXTS_ACTION_METRIC, transportService, searchTransportMetrics, (request, channel, task) -> { searchService.freeAllScrollContexts(); channel.sendResponse(TransportResponse.Empty.INSTANCE); - } + }) ); TransportActionProxy.registerProxyAction( transportService, @@ -423,19 +456,32 @@ public static void registerRequestHandler(TransportService transportService, Sea DFS_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ShardSearchRequest::new, - (request, channel, task) -> searchService.executeDfsPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)) + instrumentedHandler( + DFS_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeDfsPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); - TransportActionProxy.registerProxyAction(transportService, DFS_ACTION_NAME, true, DfsSearchResult::new); transportService.registerRequestHandler( QUERY_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ShardSearchRequest::new, - (request, channel, task) -> searchService.executeQueryPhase( - request, - (SearchShardTask) task, - new ChannelActionListener<>(channel) + instrumentedHandler( + QUERY_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeQueryPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) ) ); TransportActionProxy.registerProxyActionWithDynamicResponseType( @@ -449,9 +495,16 @@ public static void registerRequestHandler(TransportService transportService, Sea QUERY_ID_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, QuerySearchRequest::new, - (request, channel, task) -> { - searchService.executeQueryPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); - } + instrumentedHandler( + QUERY_ID_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeQueryPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); TransportActionProxy.registerProxyAction(transportService, QUERY_ID_ACTION_NAME, true, QuerySearchResult::new); @@ -459,9 +512,16 @@ public static void registerRequestHandler(TransportService transportService, Sea QUERY_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, InternalScrollSearchRequest::new, - (request, channel, task) -> { - searchService.executeQueryPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); - } + instrumentedHandler( + QUERY_SCROLL_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeQueryPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); TransportActionProxy.registerProxyAction(transportService, QUERY_SCROLL_ACTION_NAME, true, ScrollQuerySearchResult::new); @@ -469,22 +529,33 @@ public static void registerRequestHandler(TransportService transportService, Sea QUERY_FETCH_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, InternalScrollSearchRequest::new, - (request, channel, task) -> { - searchService.executeFetchPhase(request, (SearchShardTask) task, new ChannelActionListener<>(channel)); - } + instrumentedHandler( + QUERY_FETCH_SCROLL_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeFetchPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); 
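Every registration above goes through the new `instrumentedHandler` wrapper (defined a little further down), which times `messageReceived` and records the elapsed milliseconds on the latency histogram under the action's metric name. One caveat worth noting: handlers that complete asynchronously through `ChannelActionListener` are timed only for their synchronous dispatch, not until the response is actually sent. A sketch of how an additional action would opt in; the action name, request type, and metric label here are hypothetical, not part of this change:

```java
// Hypothetical registration following the same pattern; "my_phase" and
// MyPhaseRequest are illustrative names only.
transportService.registerRequestHandler(
    "indices:data/read/search[phase/my_phase]",
    EsExecutors.DIRECT_EXECUTOR_SERVICE,
    MyPhaseRequest::new,
    instrumentedHandler(
        "my_phase", // recorded as the "action" attribute on the histogram
        transportService,
        searchTransportMetrics,
        (request, channel, task) -> channel.sendResponse(TransportResponse.Empty.INSTANCE)
    )
);
```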
TransportActionProxy.registerProxyAction(transportService, QUERY_FETCH_SCROLL_ACTION_NAME, true, ScrollQueryFetchSearchResult::new); - TransportRequestHandler shardFetchHandler = (request, channel, task) -> searchService.executeFetchPhase( - request, - (SearchShardTask) task, - new ChannelActionListener<>(channel) - ); transportService.registerRequestHandler( FETCH_ID_SCROLL_ACTION_NAME, EsExecutors.DIRECT_EXECUTOR_SERVICE, ShardFetchRequest::new, - shardFetchHandler + instrumentedHandler( + FETCH_ID_SCROLL_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeFetchPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, true, FetchSearchResult::new); @@ -494,7 +565,16 @@ public static void registerRequestHandler(TransportService transportService, Sea true, true, ShardFetchSearchRequest::new, - shardFetchHandler + instrumentedHandler( + FETCH_ID_ACTION_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.executeFetchPhase( + request, + (SearchShardTask) task, + new ChannelActionListener<>(channel) + ) + ) ); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_ACTION_NAME, true, FetchSearchResult::new); @@ -502,16 +582,39 @@ public static void registerRequestHandler(TransportService transportService, Sea QUERY_CAN_MATCH_NODE_NAME, transportService.getThreadPool().executor(ThreadPool.Names.SEARCH_COORDINATION), CanMatchNodeRequest::new, - (request, channel, task) -> searchService.canMatch(request, new ChannelActionListener<>(channel)) + instrumentedHandler( + QUERY_CAN_MATCH_NODE_METRIC, + transportService, + searchTransportMetrics, + (request, channel, task) -> searchService.canMatch(request, new ChannelActionListener<>(channel)) + ) ); TransportActionProxy.registerProxyAction(transportService, QUERY_CAN_MATCH_NODE_NAME, true, CanMatchNodeResponse::new); } + private static TransportRequestHandler instrumentedHandler( + String actionQualifier, + TransportService transportService, + SearchTransportAPMMetrics searchTransportMetrics, + TransportRequestHandler transportRequestHandler + ) { + return (request, channel, task) -> { + var startTime = transportService.getThreadPool().relativeTimeInMillis(); + try { + transportRequestHandler.messageReceived(request, channel, task); + } finally { + var elapsedTime = transportService.getThreadPool().relativeTimeInMillis() - startTime; + searchTransportMetrics.getActionLatencies().record(elapsedTime, Map.of(ACTION_ATTRIBUTE_NAME, actionQualifier)); + } + }; + } + /** * Returns a connection to the given node on the provided cluster. If the cluster alias is null the node will be resolved * against the local cluster. + * * @param clusterAlias the cluster alias the node should be resolved against - * @param node the node to resolve + * @param node the node to resolve * @return a connection to the given node belonging to the cluster with the provided alias. 
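On the metrics side, the new `SearchTransportAPMMetrics` class above registers a single histogram under one base name and distinguishes actions via the `action` attribute. A sketch of standalone usage, assuming the no-op `MeterRegistry.NOOP` as commonly used in tests; production code receives the registry from the telemetry infrastructure:

```java
import org.elasticsearch.action.search.SearchTransportAPMMetrics;
import org.elasticsearch.telemetry.metric.MeterRegistry;

import java.util.Map;

import static org.elasticsearch.action.search.SearchTransportAPMMetrics.ACTION_ATTRIBUTE_NAME;
import static org.elasticsearch.action.search.SearchTransportAPMMetrics.DFS_ACTION_METRIC;

class MetricsSketch {
    static void demo() {
        var metrics = new SearchTransportAPMMetrics(MeterRegistry.NOOP);
        // Record a 12ms handler execution attributed to the dfs phase.
        metrics.getActionLatencies().record(12L, Map.of(ACTION_ATTRIBUTE_NAME, DFS_ACTION_METRIC));
    }
}
```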
*/ public Transport.Connection getConnection(@Nullable String clusterAlias, DiscoveryNode node) { diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java index 2bc642e6c0907..eb01cb2f3137b 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportOpenPointInTimeAction.java @@ -28,12 +28,12 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -252,7 +252,10 @@ public void onFailure(Exception e) { @Override protected void doRun() { - sendSearchResponse(InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, results.getAtomicArray()); + sendSearchResponse( + new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), + results.getAtomicArray() + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java index 6045a9ff5efa3..4e9aed5f643f2 100644 --- a/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java +++ b/server/src/main/java/org/elasticsearch/action/search/TransportSearchAction.java @@ -63,10 +63,8 @@ import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchService; import org.elasticsearch.search.aggregations.AggregationReduceContext; -import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.profile.SearchProfileShardResult; @@ -162,7 +160,8 @@ public TransportSearchAction( ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, NamedWriteableRegistry namedWriteableRegistry, - ExecutorSelector executorSelector + ExecutorSelector executorSelector, + SearchTransportAPMMetrics searchTransportMetrics ) { super(TYPE.name(), transportService, actionFilters, SearchRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.threadPool = threadPool; @@ -170,7 +169,7 @@ public TransportSearchAction( this.searchPhaseController = searchPhaseController; this.searchTransportService = searchTransportService; this.remoteClusterService = searchTransportService.getRemoteClusterService(); - SearchTransportService.registerRequestHandler(transportService, searchService); + SearchTransportService.registerRequestHandler(transportService, searchService, searchTransportMetrics); this.clusterService = clusterService; this.transportService = transportService; this.searchService = 
searchService; @@ -541,19 +540,16 @@ public void onResponse(SearchResponse searchResponse) { ? null : new SearchProfileResults(profileResults); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse( - searchResponse.getHits(), - (InternalAggregations) searchResponse.getAggregations(), - searchResponse.getSuggest(), - profile, - searchResponse.isTimedOut(), - searchResponse.isTerminatedEarly(), - searchResponse.getNumReducePhases() - ); ActionListener.respondAndRelease( listener, new SearchResponse( - internalSearchResponse, + searchResponse.getHits(), + searchResponse.getAggregations(), + searchResponse.getSuggest(), + searchResponse.isTimedOut(), + searchResponse.isTerminatedEarly(), + profile, + searchResponse.getNumReducePhases(), searchResponse.getScrollId(), searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), diff --git a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java index 721983b6af0e7..e2b8fcbf2825c 100644 --- a/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java +++ b/server/src/main/java/org/elasticsearch/action/support/PlainActionFuture.java @@ -31,13 +31,15 @@ public class PlainActionFuture implements ActionFuture, ActionListener { @Override - public void onResponse(T result) { + public void onResponse(@Nullable T result) { set(result); } @Override public void onFailure(Exception e) { - setException(e); + if (sync.setException(Objects.requireNonNull(e))) { + done(false); + } } private static final String BLOCKING_OP_REASON = "Blocking operation"; @@ -115,23 +117,9 @@ public boolean cancel(boolean mayInterruptIfRunning) { return false; } done(false); - if (mayInterruptIfRunning) { - interruptTask(); - } return true; } - /** - * Subclasses can override this method to implement interruption of the - * future's computation. The method is invoked automatically by a successful - * call to {@link #cancel(boolean) cancel(true)}. - *
<p>
- * The default implementation does nothing. - * - * @since 10.0 - */ - protected void interruptTask() {} - /** * Subclasses should invoke this method to set the result of the computation * to {@code value}. This will set the state of the future to @@ -141,7 +129,7 @@ protected void interruptTask() {} * @param value the value that was the result of the task. * @return true if the state was successfully changed. */ - protected boolean set(@Nullable T value) { + protected final boolean set(@Nullable T value) { boolean result = sync.set(value); if (result) { done(true); @@ -149,33 +137,6 @@ protected boolean set(@Nullable T value) { return result; } - /** - * Subclasses should invoke this method to set the result of the computation - * to an error, {@code throwable}. This will set the state of the future to - * {@link PlainActionFuture.Sync#COMPLETED} and call {@link #done(boolean)} if the - * state was successfully changed. - * - * @param throwable the exception that the task failed with. - * @return true if the state was successfully changed. - * @throws Error if the throwable was an {@link Error}. - */ - protected boolean setException(Throwable throwable) { - boolean result = sync.setException(Objects.requireNonNull(throwable)); - if (result) { - done(false); - } - - // If it's an Error, we want to make sure it reaches the top of the - // call stack, so we rethrow it. - - // we want to notify the listeners we have with errors as well, as it breaks - // how we work in ES in terms of using assertions - // if (throwable instanceof Error) { - // throw (Error) throwable; - // } - return result; - } - /** * Called when the {@link PlainActionFuture} is completed. The {@code success} boolean indicates if the {@link * PlainActionFuture} was successfully completed (the value is {@code true}). In the cases the {@link PlainActionFuture} @@ -194,16 +155,6 @@ public T actionGet() { } } - @Override - public T actionGet(String timeout) { - return actionGet(TimeValue.parseTimeValue(timeout, null, getClass().getSimpleName() + ".actionGet.timeout")); - } - - @Override - public T actionGet(long timeoutMillis) { - return actionGet(timeoutMillis, TimeUnit.MILLISECONDS); - } - @Override public T actionGet(TimeValue timeout) { return actionGet(timeout.millis(), TimeUnit.MILLISECONDS); @@ -272,7 +223,7 @@ static final class Sync extends AbstractQueuedSynchronizer { static final int CANCELLED = 4; private V value; - private Throwable exception; + private Exception exception; /* * Acquisition succeeds if the future is done, otherwise it fails. @@ -311,7 +262,7 @@ V get(long nanos) throws TimeoutException, CancellationException, ExecutionExcep } /** - * Blocks until {@link #complete(Object, Throwable, int)} has been + * Blocks until {@link #complete(Object, Exception, int)} has been * successfully called. Throws a {@link CancellationException} if the task * was cancelled, or a {@link ExecutionException} if the task completed with * an error. @@ -390,8 +341,8 @@ boolean set(@Nullable V v) { /** * Transition to the COMPLETED state and set the exception. */ - boolean setException(Throwable t) { - return complete(null, t, COMPLETED); + boolean setException(Exception e) { + return complete(null, e, COMPLETED); } /** @@ -409,16 +360,16 @@ boolean cancel() { * final state ({@link #COMPLETED} or {@link #CANCELLED}). * * @param v the value to set as the result of the computation. - * @param t the exception to set as the result of the computation. + * @param e the exception to set as the result of the computation. 
* @param finalState the state to transition to. */ - private boolean complete(@Nullable V v, @Nullable Throwable t, int finalState) { + private boolean complete(@Nullable V v, @Nullable Exception e, int finalState) { boolean doCompletion = compareAndSetState(RUNNING, COMPLETING); if (doCompletion) { // If this thread successfully transitioned to COMPLETING, set the value // and exception and then release to the final state. this.value = v; - this.exception = t; + this.exception = e; releaseShared(finalState); } else if (getState() == COMPLETING) { // If some other thread is currently completing the future, block until diff --git a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java index 17d712bdf5af4..9e3bed8cef09a 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/ClusterAdminClient.java @@ -16,9 +16,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -88,9 +85,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptResponse; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequestBuilder; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; import org.elasticsearch.action.admin.indices.dangling.delete.DeleteDanglingIndexRequest; import org.elasticsearch.action.admin.indices.dangling.import_index.ImportDanglingIndexRequest; import org.elasticsearch.action.admin.indices.dangling.list.ListDanglingIndicesRequest; @@ -256,18 +250,6 @@ public interface ClusterAdminClient extends ElasticsearchClient { */ void nodesUsage(NodesUsageRequest request, ActionListener listener); - /** - * Returns top N hot-threads samples per node. The hot-threads are only sampled - * for the node ids specified in the request. - */ - void nodesHotThreads(NodesHotThreadsRequest request, ActionListener listener); - - /** - * Returns a request builder to fetch top N hot-threads samples per node. The hot-threads are only sampled - * for the node ids provided. Note: Use {@code *} to fetch samples for all nodes - */ - NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds); - /** * List tasks * @@ -456,18 +438,6 @@ public interface ClusterAdminClient extends ElasticsearchClient { */ RestoreSnapshotRequestBuilder prepareRestoreSnapshot(String repository, String snapshot); - /** - * Returns a list of the pending cluster tasks, that are scheduled to be executed. 
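Summarizing the `PlainActionFuture` hunks above: `onFailure` now completes the future directly via `Sync#setException` plus `done(false)`, the stored exception type narrows from `Throwable` to `Exception` (so `Error`s can no longer be set as results), `set(...)` becomes final, `interruptTask()` is gone, and the `actionGet(String)` and `actionGet(long)` overloads are removed. A hedged sketch of the surviving call pattern:

```java
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.core.TimeValue;

class FutureSketch {
    static String demo() {
        PlainActionFuture<String> future = new PlainActionFuture<>();
        future.onResponse("done"); // onResponse is explicitly @Nullable-friendly now
        // Timeouts must be expressed as a TimeValue (or a long plus TimeUnit);
        // the String and bare-millis overloads no longer exist.
        return future.actionGet(TimeValue.timeValueSeconds(1));
    }
}
```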
This includes operations - * that update the cluster state (for example, a create index operation) - */ - void pendingClusterTasks(PendingClusterTasksRequest request, ActionListener listener); - - /** - * Returns a list of the pending cluster tasks, that are scheduled to be executed. This includes operations - * that update the cluster state (for example, a create index operation) - */ - PendingClusterTasksRequestBuilder preparePendingClusterTasks(); - /** * Get snapshot status. */ diff --git a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java index 21c01abd52437..075d1a4bb1e66 100644 --- a/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java +++ b/server/src/main/java/org/elasticsearch/client/internal/support/AbstractClient.java @@ -24,10 +24,6 @@ import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequestBuilder; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; import org.elasticsearch.action.admin.cluster.health.TransportClusterHealthAction; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequestBuilder; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequest; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoRequestBuilder; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; @@ -122,10 +118,6 @@ import org.elasticsearch.action.admin.cluster.storedscripts.PutStoredScriptRequestBuilder; import org.elasticsearch.action.admin.cluster.storedscripts.TransportDeleteStoredScriptAction; import org.elasticsearch.action.admin.cluster.storedscripts.TransportPutStoredScriptAction; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequestBuilder; -import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; -import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequestBuilder; import org.elasticsearch.action.admin.indices.alias.TransportIndicesAliasesAction; @@ -795,16 +787,6 @@ public ClusterStatsRequestBuilder prepareClusterStats() { return new ClusterStatsRequestBuilder(this); } - @Override - public void nodesHotThreads(NodesHotThreadsRequest request, ActionListener listener) { - execute(TransportNodesHotThreadsAction.TYPE, request, listener); - } - - @Override - public NodesHotThreadsRequestBuilder prepareNodesHotThreads(String... nodesIds) { - return new NodesHotThreadsRequestBuilder(this).setNodesIds(nodesIds); - } - @Override public ActionFuture listTasks(final ListTasksRequest request) { return execute(TransportListTasksAction.TYPE, request); @@ -865,16 +847,6 @@ public ClusterSearchShardsRequestBuilder prepareSearchShards(String... 
indices) return new ClusterSearchShardsRequestBuilder(this).setIndices(indices); } - @Override - public PendingClusterTasksRequestBuilder preparePendingClusterTasks() { - return new PendingClusterTasksRequestBuilder(this); - } - - @Override - public void pendingClusterTasks(PendingClusterTasksRequest request, ActionListener listener) { - execute(TransportPendingClusterTasksAction.TYPE, request, listener); - } - @Override public void putRepository(PutRepositoryRequest request, ActionListener listener) { execute(TransportPutRepositoryAction.TYPE, request, listener); diff --git a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java index e861ff3ecf27e..6deac76b171d6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/ClusterState.java +++ b/server/src/main/java/org/elasticsearch/cluster/ClusterState.java @@ -1031,9 +1031,11 @@ public static ClusterState readFrom(StreamInput in, DiscoveryNode localNode) thr */ public static final TransportVersion INFERRED_TRANSPORT_VERSION = TransportVersions.V_8_8_0; + public static final Version VERSION_INTRODUCING_TRANSPORT_VERSIONS = Version.V_8_8_0; + private static TransportVersion inferTransportVersion(DiscoveryNode node) { TransportVersion tv; - if (node.getVersion().before(Version.V_8_8_0)) { + if (node.getVersion().before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { // 1-to-1 mapping between Version and TransportVersion tv = TransportVersion.fromId(node.getPre811VersionId().getAsInt()); } else { diff --git a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java index 1744bcc91b834..26c453d419f4c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java +++ b/server/src/main/java/org/elasticsearch/cluster/InternalClusterInfoService.java @@ -26,7 +26,6 @@ import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.allocation.DiskThresholdSettings; import org.elasticsearch.cluster.service.ClusterService; @@ -97,7 +96,6 @@ public class InternalClusterInfoService implements ClusterInfoService, ClusterSt private final Object mutex = new Object(); private final List> nextRefreshListeners = new ArrayList<>(); - private final ClusterService clusterService; private AsyncRefresh currentRefresh; private RefreshScheduler refreshScheduler; @@ -108,7 +106,6 @@ public InternalClusterInfoService(Settings settings, ClusterService clusterServi this.indicesStatsSummary = IndicesStatsSummary.EMPTY; this.threadPool = threadPool; this.client = client; - this.clusterService = clusterService; this.updateFrequency = INTERNAL_CLUSTER_INFO_UPDATE_INTERVAL_SETTING.get(settings); this.fetchTimeout = INTERNAL_CLUSTER_INFO_TIMEOUT_SETTING.get(settings); this.enabled = DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_THRESHOLD_ENABLED_SETTING.get(settings); @@ -250,7 +247,6 @@ public void onResponse(IndicesStatsResponse indicesStatsResponse) { final Map reservedSpaceBuilders = new HashMap<>(); buildShardLevelInfo( - clusterService.state().routingTable(), adjustShardStats(stats), shardSizeByIdentifierBuilder, shardDataSetSizeBuilder, @@ -445,7 +441,6 @@ public void addListener(Consumer clusterInfoConsumer) 
{ } static void buildShardLevelInfo( - RoutingTable routingTable, ShardStats[] stats, Map shardSizes, Map shardDataSetSizeBuilder, @@ -453,7 +448,7 @@ static void buildShardLevelInfo( Map reservedSpaceByShard ) { for (ShardStats s : stats) { - final ShardRouting shardRouting = routingTable.deduplicate(s.getShardRouting()); + final ShardRouting shardRouting = s.getShardRouting(); dataPathByShard.put(ClusterInfo.NodeAndShard.from(shardRouting), s.getDataPath()); final StoreStats storeStats = s.getStats().getStore(); @@ -462,7 +457,7 @@ static void buildShardLevelInfo( } final long size = storeStats.sizeInBytes(); final long dataSetSize = storeStats.totalDataSetSizeInBytes(); - final long reserved = storeStats.getReservedSize().getBytes(); + final long reserved = storeStats.reservedSizeInBytes(); final String shardIdentifier = ClusterInfo.shardIdentifierFromRouting(shardRouting); logger.trace("shard: {} size: {} reserved: {}", shardIdentifier, size, reserved); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java index cc0ff0b26f4d7..6147712a5d70a 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/AliasValidator.java @@ -24,7 +24,6 @@ import org.elasticsearch.xcontent.XContentParserConfiguration; import java.io.IOException; -import java.io.InputStream; import java.util.Optional; import java.util.function.Function; @@ -142,14 +141,11 @@ public static void validateAliasFilter( assert searchExecutionContext != null; try ( - InputStream inputStream = filter.streamInput(); - XContentParser parser = XContentFactory.xContentType(inputStream) - .xContent() - .createParser( - XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) - .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), - filter.streamInput() - ) + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry).withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + filter, + XContentHelper.xContentType(filter) + ) ) { validateAliasFilter(parser, searchExecutionContext); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java index 34d8515d2dfdd..35f66f2848f5f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStream.java @@ -28,6 +28,7 @@ import org.elasticsearch.common.time.DateFormatter; import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.util.FeatureFlag; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; @@ -38,7 +39,6 @@ import org.elasticsearch.xcontent.ConstructingObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContentObject; -import org.elasticsearch.xcontent.XContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; @@ -1129,8 +1129,7 @@ private static Instant getTimeStampFromRaw(Object rawTimestamp) { } private static Instant getTimestampFromParser(BytesReference source, XContentType 
xContentType) { - XContent xContent = xContentType.xContent(); - try (XContentParser parser = xContent.createParser(TS_EXTRACT_CONFIG, source.streamInput())) { + try (XContentParser parser = XContentHelper.createParserNotCompressed(TS_EXTRACT_CONFIG, source, xContentType)) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser); return switch (parser.nextToken()) { diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java index 8d7ce0525e943..83a5d99c8f348 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/DataStreamLifecycle.java @@ -48,7 +48,7 @@ public class DataStreamLifecycle implements SimpleDiffable, ToXContentObject { // Versions over the wire - public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_500_057; + public static final TransportVersion ADDED_ENABLED_FLAG_VERSION = TransportVersions.V_8_500_061; public static final String DATA_STREAMS_LIFECYCLE_ONLY_SETTING_NAME = "data_streams.lifecycle_only.mode"; @@ -190,10 +190,8 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { out.writeOptionalWriteable(dataRetention); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { - out.writeOptionalWriteable(downsampling); - } if (out.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { + out.writeOptionalWriteable(downsampling); out.writeBoolean(enabled); } } @@ -204,14 +202,11 @@ public DataStreamLifecycle(StreamInput in) throws IOException { } else { dataRetention = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { - downsampling = in.readOptionalWriteable(Downsampling::read); - } else { - downsampling = null; - } if (in.getTransportVersion().onOrAfter(ADDED_ENABLED_FLAG_VERSION)) { + downsampling = in.readOptionalWriteable(Downsampling::read); enabled = in.readBoolean(); } else { + downsampling = null; enabled = true; } } diff --git a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java index 43f117acbd9fe..e77a7b27e1a2c 100644 --- a/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java +++ b/server/src/main/java/org/elasticsearch/cluster/node/DiscoveryNode.java @@ -337,7 +337,7 @@ public DiscoveryNode(StreamInput in) throws IOException { } } this.roles = Collections.unmodifiableSortedSet(roles); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { versionInfo = new VersionInformation(Version.readVersion(in), IndexVersion.readVersion(in), IndexVersion.readVersion(in)); } else { versionInfo = inferVersionInformation(Version.readVersion(in)); @@ -374,7 +374,7 @@ public void writeTo(StreamOutput out) throws IOException { o.writeString(role.roleNameAbbreviation()); o.writeBoolean(role.canContainData()); }); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { Version.writeVersion(versionInfo.nodeVersion(), out); IndexVersion.writeVersion(versionInfo.minIndexVersion(), out); 
IndexVersion.writeVersion(versionInfo.maxIndexVersion(), out); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java index 85e201d52f03b..1f364e1ace6e4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimator.java @@ -20,9 +20,13 @@ public class ExpectedShardSizeEstimator { - public static long getExpectedShardSize(ShardRouting shardRouting, long defaultSize, RoutingAllocation allocation) { + public static boolean shouldReserveSpaceForInitializingShard(ShardRouting shard, RoutingAllocation allocation) { + return shouldReserveSpaceForInitializingShard(shard, allocation.metadata()); + } + + public static long getExpectedShardSize(ShardRouting shard, long defaultSize, RoutingAllocation allocation) { return getExpectedShardSize( - shardRouting, + shard, defaultSize, allocation.clusterInfo(), allocation.snapshotShardSizeInfo(), @@ -31,6 +35,27 @@ public static long getExpectedShardSize(ShardRouting shardRouting, long defaultS ); } + public static boolean shouldReserveSpaceForInitializingShard(ShardRouting shard, Metadata metadata) { + assert shard.initializing() : "Expected initializing shard, got: " + shard; + return switch (shard.recoverySource().getType()) { + // No need to reserve disk space when initializing a new empty shard + case EMPTY_STORE -> false; + + // No need to reserve disk space if the shard is already allocated on the disk. Starting it is not going to use more. + case EXISTING_STORE -> false; + + // Peer recovery requires downloading all segments locally to start the shard. Reserve disk space for this. + case PEER -> true; + + // Snapshot restore (unless it is partial) requires downloading all segments locally from the blobstore to start the shard. + case SNAPSHOT -> metadata.getIndexSafe(shard.index()).isPartialSearchableSnapshot() == false; + + // A shrink/split/clone operation is going to clone existing locally placed shards using file system hard links, + // so no additional space is going to be used until future merges + case LOCAL_SHARDS -> false; + }; + } + /** * Returns the expected shard size for the given shard or the default value provided if not enough information are available * to estimate the shards size.
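The new `shouldReserveSpaceForInitializingShard` above reduces to a small decision table over the shard's recovery source. As a compact reference, here is a standalone mirror of that table; it is illustrative only, since the real `SNAPSHOT` branch derives the partial-searchable-snapshot flag from `Metadata`:

```java
import org.elasticsearch.cluster.routing.RecoverySource;

class ReserveSpaceSketch {
    // Mirrors the switch in ExpectedShardSizeEstimator#shouldReserveSpaceForInitializingShard.
    static boolean reservesSpace(RecoverySource.Type type, boolean partialSearchableSnapshot) {
        return switch (type) {
            case EMPTY_STORE, EXISTING_STORE -> false;           // data is new or already on disk
            case LOCAL_SHARDS -> false;                          // shrink/split/clone uses hard links
            case PEER -> true;                                   // segments are downloaded from the primary
            case SNAPSHOT -> partialSearchableSnapshot == false; // full restores download segments first
        };
    }
}
```

The `DiskThresholdDecider` hunks further down use this helper to decide which initializing shards contribute to `sizeOfUnaccountedShards`.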
diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java index cd05ca3d523d8..9003e7720c747 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/IndexRouting.java @@ -281,16 +281,14 @@ public Builder builder() { private Builder hashSource(XContentType sourceType, BytesReference source) { Builder b = builder(); - try { - try (XContentParser parser = sourceType.xContent().createParser(parserConfig, source.streamInput())) { - parser.nextToken(); // Move to first token - if (parser.currentToken() == null) { - throw new IllegalArgumentException("Error extracting routing: source didn't contain any routing fields"); - } - parser.nextToken(); - b.extractObject(null, parser); - ensureExpectedToken(null, parser.nextToken(), parser); + try (XContentParser parser = sourceType.xContent().createParser(parserConfig, source.streamInput())) { + parser.nextToken(); // Move to first token + if (parser.currentToken() == null) { + throw new IllegalArgumentException("Error extracting routing: source didn't contain any routing fields"); } + parser.nextToken(); + b.extractObject(null, parser); + ensureExpectedToken(null, parser.nextToken(), parser); } catch (IOException | ParsingException e) { throw new IllegalArgumentException("Error extracting routing: " + e.getMessage(), e); } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java index 986a6bd0385e8..723d65fbc2a3f 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/RoutingTable.java @@ -148,33 +148,6 @@ public IndexShardRoutingTable shardRoutingTable(ShardId shardId) { return shard; } - /** - * Try to deduplicate the given shard routing with an equal instance found in this routing table. This is used by the logic of the - * {@link org.elasticsearch.cluster.routing.allocation.decider.DiskThresholdDecider} and - * {@link org.elasticsearch.cluster.InternalClusterInfoService} to deduplicate instances created by a master node and those read from - * the network to speed up the use of {@link ShardRouting} as a map key in {@link org.elasticsearch.cluster.ClusterInfo#getDataPath}. 
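The `deduplicate` removal below pairs with the `InternalClusterInfoService` hunks earlier: shard-level info is now keyed by the `ShardRouting` carried on each `ShardStats` directly, so the interning-style lookup through the routing table is no longer needed. A sketch of the simplified consumption, with types as in those hunks:

```java
// Per-shard info is derived from the stats themselves, with no
// routingTable.deduplicate(...) indirection.
for (ShardStats s : stats) {
    ShardRouting routing = s.getShardRouting();
    dataPathByShard.put(ClusterInfo.NodeAndShard.from(routing), s.getDataPath());
    StoreStats storeStats = s.getStats().getStore();
    if (storeStats != null) {
        shardSizes.put(ClusterInfo.shardIdentifierFromRouting(routing), storeStats.sizeInBytes());
    }
}
```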
- * - * @param shardRouting shard routing to deduplicate - * @return deduplicated shard routing from this routing table if an equivalent shard routing was found or the given instance otherwise - */ - public ShardRouting deduplicate(ShardRouting shardRouting) { - final IndexRoutingTable indexShardRoutingTable = indicesRouting.get(shardRouting.index().getName()); - if (indexShardRoutingTable == null) { - return shardRouting; - } - final IndexShardRoutingTable shardRoutingTable = indexShardRoutingTable.shard(shardRouting.id()); - if (shardRoutingTable == null) { - return shardRouting; - } - for (int i = 0; i < shardRoutingTable.size(); i++) { - ShardRouting found = shardRoutingTable.shard(i); - if (shardRouting.equals(found)) { - return found; - } - } - return shardRouting; - } - @Nullable public ShardRouting getByAllocationId(ShardId shardId, String allocationId) { final IndexRoutingTable indexRoutingTable = index(shardId.getIndex()); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java index 5c216b9a5b308..6645fd7d0e895 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/DiskThresholdMonitor.java @@ -34,6 +34,7 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import java.util.ArrayList; import java.util.Collections; @@ -425,6 +426,7 @@ long sizeOfRelocatingShards(RoutingNode routingNode, DiskUsage diskUsage, Cluste true, diskUsage.getPath(), info, + SnapshotShardSizeInfo.EMPTY, reroutedClusterState.metadata(), reroutedClusterState.routingTable(), 0L diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java index e92a6106a6e33..2fa1994f9f74b 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDecider.java @@ -32,6 +32,7 @@ import java.util.Map; import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize; +import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.shouldReserveSpaceForInitializingShard; /** * The {@link DiskThresholdDecider} checks that the node a shard is potentially @@ -117,6 +118,7 @@ public static long sizeOfUnaccountedShards( boolean subtractShardsMovingAway, String dataPath, ClusterInfo clusterInfo, + SnapshotShardSizeInfo snapshotShardSizeInfo, Metadata metadata, RoutingTable routingTable, long sizeOfUnaccountableSearchableSnapshotShards @@ -129,28 +131,18 @@ public static long sizeOfUnaccountedShards( // Where reserved space is unavailable (e.g. 
stats are out-of-sync) compute a conservative estimate for initialising shards for (ShardRouting routing : node.initializing()) { - if (routing.relocatingNodeId() == null && metadata.getIndexSafe(routing.index()).isSearchableSnapshot() == false) { - // in practice the only initializing-but-not-relocating non-searchable-snapshot shards with a nonzero expected shard size - // will be ones created - // by a resize (shrink/split/clone) operation which we expect to happen using hard links, so they shouldn't be taking - // any additional space and can be ignored here - continue; - } - if (reservedSpace.containsShardId(routing.shardId())) { - continue; - } - final String actualPath = clusterInfo.getDataPath(routing); - // if we don't yet know the actual path of the incoming shard then conservatively assume it's going to the path with the least - // free space - if (actualPath == null || actualPath.equals(dataPath)) { - totalSize += getExpectedShardSize( - routing, - Math.max(routing.getExpectedShardSize(), 0L), - clusterInfo, - SnapshotShardSizeInfo.EMPTY, - metadata, - routingTable - ); + // Space needs to be reserved only when initializing shards that are going to use additional space + // that is not yet accounted for by `reservedSpace` in case of lengthy recoveries + if (shouldReserveSpaceForInitializingShard(routing, metadata) && reservedSpace.containsShardId(routing.shardId()) == false) { + final String actualPath = clusterInfo.getDataPath(routing); + // if we don't yet know the actual path of the incoming shard then conservatively assume + // it's going to the path with the least free space + if (actualPath == null || actualPath.equals(dataPath)) { + totalSize += Math.max( + routing.getExpectedShardSize(), + getExpectedShardSize(routing, 0L, clusterInfo, snapshotShardSizeInfo, metadata, routingTable) + ); + } } } @@ -159,7 +151,7 @@ public static long sizeOfUnaccountedShards( if (subtractShardsMovingAway) { for (ShardRouting routing : node.relocating()) { if (dataPath.equals(clusterInfo.getDataPath(routing))) { - totalSize -= getExpectedShardSize(routing, 0L, clusterInfo, SnapshotShardSizeInfo.EMPTY, metadata, routingTable); + totalSize -= getExpectedShardSize(routing, 0L, clusterInfo, snapshotShardSizeInfo, metadata, routingTable); } } } @@ -204,6 +196,7 @@ public Decision canAllocate(ShardRouting shardRouting, RoutingNode node, Routing false, usage.getPath(), allocation.clusterInfo(), + allocation.snapshotShardSizeInfo(), allocation.metadata(), allocation.routingTable(), allocation.unaccountedSearchableSnapshotSize(node) @@ -412,6 +405,7 @@ public Decision canRemain(IndexMetadata indexMetadata, ShardRouting shardRouting true, usage.getPath(), allocation.clusterInfo(), + allocation.snapshotShardSizeInfo(), allocation.metadata(), allocation.routingTable(), allocation.unaccountedSearchableSnapshotSize(node) @@ -491,6 +485,7 @@ private static DiskUsageWithRelocations getDiskUsage( subtractLeavingShards, usage.getPath(), allocation.clusterInfo(), + allocation.snapshotShardSizeInfo(), allocation.metadata(), allocation.routingTable(), allocation.unaccountedSearchableSnapshotSize(node) diff --git a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java index 9f852f01397da..33d8fbf99f31f 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java +++ b/server/src/main/java/org/elasticsearch/common/collect/ImmutableOpenMap.java @@ -10,9 +10,9 @@ import 
com.carrotsearch.hppc.ObjectCollection; import com.carrotsearch.hppc.ObjectObjectHashMap; -import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; import com.carrotsearch.hppc.procedures.ObjectObjectProcedure; +import com.carrotsearch.hppc.procedures.ObjectProcedure; import org.elasticsearch.common.util.Maps; @@ -84,18 +84,9 @@ public boolean containsKey(Object key) { } @Override + @SuppressWarnings("unchecked") public boolean containsValue(Object value) { - for (ObjectCursor cursor : map.values()) { - if (Objects.equals(cursor.value, value)) { - return true; - } - } - return false; - } - - @Override - public VType put(KType key, VType value) { - throw new UnsupportedOperationException("modification is not supported"); + return map.values().contains((VType) value); } @Override @@ -103,16 +94,6 @@ public VType remove(Object key) { throw new UnsupportedOperationException("modification is not supported"); } - @Override - public void putAll(Map m) { - throw new UnsupportedOperationException("modification is not supported"); - } - - @Override - public void clear() { - throw new UnsupportedOperationException("modification is not supported"); - } - @Override public int size() { return map.size(); @@ -146,35 +127,7 @@ public int hashCode() { return super.hashCode(); } - private static final class ConversionIterator implements Iterator> { - - private final Iterator> original; - - ConversionIterator(Iterator> original) { - this.original = original; - } - - @Override - public boolean hasNext() { - return original.hasNext(); - } - - @Override - public Map.Entry next() { - final ObjectObjectCursor obj = original.next(); - if (obj == null) { - return null; - } - return new Maps.ImmutableEntry<>(obj.key, obj.value); - } - - @Override - public void remove() { - throw new UnsupportedOperationException("removal is unsupported"); - } - } - - private static final class EntrySet extends AbstractSet> { + private static class EntrySet extends AbstractSet> { private final ObjectObjectHashMap map; private EntrySet(ObjectObjectHashMap map) { @@ -187,13 +140,23 @@ public int size() { } @Override - public void clear() { - throw new UnsupportedOperationException("removal is unsupported"); + public boolean isEmpty() { + return map.isEmpty(); } @Override public Iterator> iterator() { - return new ConversionIterator<>(map.iterator()); + return Iterators.map(map.iterator(), c -> new Maps.ImmutableEntry<>(c.key, c.value)); + } + + @Override + public Spliterator> spliterator() { + return Spliterators.spliterator(iterator(), size(), Spliterator.IMMUTABLE); + } + + @Override + public void forEach(Consumer> action) { + map.forEach((Consumer>) c -> action.accept(new Maps.ImmutableEntry<>(c.key, c.value))); } @SuppressWarnings("unchecked") @@ -204,70 +167,87 @@ public boolean contains(Object o) { } Map.Entry e = (Map.Entry) o; Object key = e.getKey(); - if (map.containsKey((KType) key) == false) { + Object v = map.get((KType) key); + if (v == null && map.containsKey((KType) key) == false) { return false; } - Object val = map.get((KType) key); - return Objects.equals(val, e.getValue()); + return Objects.equals(v, e.getValue()); } @Override - public boolean remove(Object o) { - throw new UnsupportedOperationException("removal is not supported"); + public String toString() { + return map.toString(); + } + } + + private static class MapObjectCollection extends AbstractCollection { + private final ObjectCollection collection; + + private MapObjectCollection(ObjectCollection 
collection) { + this.collection = collection; } @Override - public Spliterator> spliterator() { - return Spliterators.spliterator(iterator(), size(), Spliterator.SIZED); + public int size() { + return collection.size(); } @Override - public void forEach(Consumer> action) { - map.forEach((Consumer>) ooCursor -> { - Maps.ImmutableEntry entry = new Maps.ImmutableEntry<>(ooCursor.key, ooCursor.value); - action.accept(entry); - }); + public boolean isEmpty() { + return collection.isEmpty(); } - } - private static final class KeySet extends AbstractSet { + @Override + public Iterator iterator() { + return Iterators.map(collection.iterator(), c -> c.value); + } + + @Override + public Spliterator spliterator() { + return Spliterators.spliterator(iterator(), size(), Spliterator.IMMUTABLE); + } - private final ObjectObjectHashMap.KeysContainer keys; + @Override + public void forEach(Consumer action) { + collection.forEach((ObjectProcedure) action::accept); + } - private KeySet(ObjectObjectHashMap.KeysContainer keys) { - this.keys = keys; + @Override + @SuppressWarnings("unchecked") + public boolean contains(Object o) { + return collection.contains((Type) o); } @Override - public Iterator iterator() { - final Iterator> iterator = keys.iterator(); - return new Iterator<>() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public KType next() { - return iterator.next().value; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; + public boolean equals(Object obj) { + return collection.equals(obj); } @Override - public int size() { - return keys.size(); + public int hashCode() { + return collection.hashCode(); + } + + @Override + public String toString() { + return collection.toString(); + } + + @Override + public Object[] toArray() { + return collection.toArray(); } @Override @SuppressWarnings("unchecked") - public boolean contains(Object o) { - return keys.contains((KType) o); + public T[] toArray(T[] a) { + return a.length == 0 ? 
(T[]) collection.toArray(a.getClass().getComponentType()) : super.toArray(a); + } + } + + private static class KeySet extends MapObjectCollection implements Set { + private KeySet(ObjectObjectHashMap.KeysContainer keys) { + super(keys); } }; @@ -278,17 +258,7 @@ public Set keySet() { @Override public Collection values() { - return new AbstractCollection() { - @Override - public Iterator iterator() { - return ImmutableOpenMap.iterator(map.values()); - } - - @Override - public int size() { - return map.size(); - } - }; + return new MapObjectCollection<>(map.values()); } @Override @@ -296,26 +266,6 @@ public void forEach(BiConsumer action) { map.forEach((ObjectObjectProcedure) action::accept); } - static Iterator iterator(ObjectCollection collection) { - final Iterator> iterator = collection.iterator(); - return new Iterator<>() { - @Override - public boolean hasNext() { - return iterator.hasNext(); - } - - @Override - public T next() { - return iterator.next().value; - } - - @Override - public void remove() { - throw new UnsupportedOperationException(); - } - }; - } - @Override public String toString() { return map.toString(); @@ -407,9 +357,7 @@ public ImmutableOpenMap build() { */ public Builder putAllFromMap(Map map) { maybeCloneMap(); - for (Map.Entry entry : map.entrySet()) { - this.mutableMap.put(entry.getKey(), entry.getValue()); - } + map.forEach(mutableMap::put); return this; } diff --git a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java index d7c63edac2c94..4b5cef4bbbd45 100644 --- a/server/src/main/java/org/elasticsearch/common/collect/Iterators.java +++ b/server/src/main/java/org/elasticsearch/common/collect/Iterators.java @@ -10,11 +10,13 @@ import org.elasticsearch.core.Nullable; +import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.NoSuchElementException; import java.util.Objects; import java.util.function.BiPredicate; +import java.util.function.Consumer; import java.util.function.Function; import java.util.function.IntFunction; import java.util.function.ToIntFunction; @@ -90,35 +92,19 @@ public T next() { } return value; } - } - - public static Iterator forArray(T[] array) { - return new ArrayIterator<>(array); - } - - private static final class ArrayIterator implements Iterator { - - private final T[] array; - private int index; - - private ArrayIterator(T[] array) { - this.array = Objects.requireNonNull(array, "Unable to iterate over a null array"); - } @Override - public boolean hasNext() { - return index < array.length; - } - - @Override - public T next() { - if (index >= array.length) { - throw new NoSuchElementException(); + public void forEachRemaining(Consumer action) { + while (index < iterators.length) { + iterators[index++].forEachRemaining(action); } - return array[index++]; } } + public static Iterator forArray(T[] array) { + return Arrays.asList(array).iterator(); + } + public static Iterator forRange(int lowerBoundInclusive, int upperBoundExclusive, IntFunction fn) { assert lowerBoundInclusive <= upperBoundExclusive : lowerBoundInclusive + " vs " + upperBoundExclusive; if (upperBoundExclusive <= lowerBoundInclusive) { @@ -183,6 +169,11 @@ public boolean hasNext() { public U next() { return fn.apply(input.next()); } + + @Override + public void forEachRemaining(Consumer action) { + input.forEachRemaining(t -> action.accept(fn.apply(t))); + } } public static Iterator flatMap(Iterator input, Function> fn) { diff --git 
a/server/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java b/server/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java index bda33e28fa315..5ebcca93889ff 100644 --- a/server/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java +++ b/server/src/main/java/org/elasticsearch/common/compress/CompressedXContent.java @@ -164,11 +164,9 @@ public CompressedXContent(byte[] data) throws IOException { * @return compressed x-content normalized to not contain any whitespaces */ public static CompressedXContent fromJSON(String json) throws IOException { - return new CompressedXContent( - (ToXContentObject) (builder, params) -> builder.copyCurrentStructure( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json) - ) - ); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json)) { + return new CompressedXContent((ToXContentObject) (builder, params) -> builder.copyCurrentStructure(parser)); + } } public CompressedXContent(String str) throws IOException { diff --git a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java index 1f4ca454b9c8c..1201bab887861 100644 --- a/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java +++ b/server/src/main/java/org/elasticsearch/common/geo/GeometryFormatterFactory.java @@ -9,8 +9,10 @@ package org.elasticsearch.common.geo; import org.elasticsearch.geometry.Geometry; +import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.geometry.utils.WellKnownText; +import java.nio.ByteOrder; import java.util.ArrayList; import java.util.List; import java.util.function.Function; @@ -22,6 +24,7 @@ public class GeometryFormatterFactory { public static final String GEOJSON = "geojson"; public static final String WKT = "wkt"; + public static final String WKB = "wkb"; /** * Returns a formatter by name @@ -38,6 +41,11 @@ public static Function, List> getFormatter(String name, Func geometries.forEach((shape) -> objects.add(WellKnownText.toWKT(toGeometry.apply(shape)))); return objects; }; + case WKB -> geometries -> { + final List objects = new ArrayList<>(geometries.size()); + geometries.forEach((shape) -> objects.add(WellKnownBinary.toWKB(toGeometry.apply(shape), ByteOrder.LITTLE_ENDIAN))); + return objects; + }; default -> throw new IllegalArgumentException("Unrecognized geometry format [" + name + "]."); }; } diff --git a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java index 8e469973c0f08..a2416fe6a6a15 100644 --- a/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/elasticsearch/common/settings/ClusterSettings.java @@ -574,6 +574,7 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteClusterPortSettings.MAX_REQUEST_HEADER_SIZE, HealthPeriodicLogger.POLL_INTERVAL_SETTING, HealthPeriodicLogger.ENABLED_SETTING, + HealthPeriodicLogger.OUTPUT_MODE_SETTING, DataStreamLifecycle.CLUSTER_LIFECYCLE_DEFAULT_ROLLOVER_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_INTERVAL_SETTING, IndicesClusterStateService.SHARD_LOCK_RETRY_TIMEOUT_SETTING, diff --git a/server/src/main/java/org/elasticsearch/common/util/BitArray.java b/server/src/main/java/org/elasticsearch/common/util/BitArray.java index 696e81b3beec9..96c00538f07d4 
100644 --- a/server/src/main/java/org/elasticsearch/common/util/BitArray.java +++ b/server/src/main/java/org/elasticsearch/common/util/BitArray.java @@ -129,6 +129,10 @@ public boolean get(long index) { return (bits.get(wordNum) & bitmask) != 0; } + public long size() { + return bits.size() * (long) Long.BYTES * Byte.SIZE; + } + private static long wordNum(long index) { return index >> 6; } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java index 3bfe5078a3487..1295aabda1e56 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentHelper.java @@ -74,10 +74,26 @@ public static XContentParser createParser(XContentParserConfiguration config, By final XContentType contentType = XContentFactory.xContentType(compressedInput); return XContentFactory.xContent(contentType).createParser(config, compressedInput); } else { - return XContentFactory.xContent(xContentType(bytes)).createParser(config, bytes.streamInput()); + return createParserNotCompressed(config, bytes, xContentType(bytes)); } } + /** + * Same as {@link #createParser(XContentParserConfiguration, BytesReference, XContentType)} but only supports uncompressed + * {@code bytes}. + */ + public static XContentParser createParserNotCompressed( + XContentParserConfiguration config, + BytesReference bytes, + XContentType xContentType + ) throws IOException { + XContent xContent = xContentType.xContent(); + if (bytes.hasArray()) { + return xContent.createParser(config, bytes.array(), bytes.arrayOffset(), bytes.length()); + } + return xContent.createParser(config, bytes.streamInput()); + } + /** * Creates a parser for the bytes provided * @deprecated use {@link #createParser(XContentParserConfiguration, BytesReference, XContentType)} @@ -111,10 +127,7 @@ public static XContentParser createParser(XContentParserConfiguration config, By return XContentFactory.xContent(xContentType).createParser(config, compressedInput); } else { // TODO now that we have config we make a method on bytes to do this building without needing this check everywhere - if (bytes.hasArray()) { - return xContentType.xContent().createParser(config, bytes.array(), bytes.arrayOffset(), bytes.length()); - } - return xContentType.xContent().createParser(config, bytes.streamInput()); + return createParserNotCompressed(config, bytes, xContentType); } } @@ -337,20 +350,8 @@ public static String convertToJson(BytesReference bytes, boolean reformatJson, b return bytes.utf8ToString(); } - if (bytes.hasArray()) { - try ( - XContentParser parser = XContentFactory.xContent(xContentType) - .createParser(XContentParserConfiguration.EMPTY, bytes.array(), bytes.arrayOffset(), bytes.length()) - ) { - return toJsonString(prettyPrint, parser); - } - } else { - try ( - InputStream stream = bytes.streamInput(); - XContentParser parser = XContentFactory.xContent(xContentType).createParser(XContentParserConfiguration.EMPTY, stream) - ) { - return toJsonString(prettyPrint, parser); - } + try (var parser = createParserNotCompressed(XContentParserConfiguration.EMPTY, bytes, xContentType)) { + return toJsonString(prettyPrint, parser); } } @@ -746,7 +747,7 @@ public static void writeTo(StreamOutput out, XContentType xContentType) throws I public static XContentParser mapToXContentParser(XContentParserConfiguration config, Map source) { try (XContentBuilder builder =
XContentFactory.contentBuilder(XContentType.JSON)) { builder.map(source); - return XContentFactory.xContent(builder.contentType()).createParser(config, Strings.toString(builder)); + return createParserNotCompressed(config, BytesReference.bytes(builder), builder.contentType()); } catch (IOException e) { throw new ElasticsearchGenerationException("Failed to generate [" + source + "]", e); } diff --git a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java index 715274fbcdc33..a72ee1d28b7f0 100644 --- a/server/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java +++ b/server/src/main/java/org/elasticsearch/common/xcontent/XContentParserUtils.java @@ -8,6 +8,7 @@ package org.elasticsearch.common.xcontent; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -178,4 +179,33 @@ public static List parseList(XContentParser parser, CheckedFunction List parseList(XContentParser parser, CheckedBiFunction valueParser) + throws IOException { + XContentParserUtils.ensureExpectedToken(Token.START_ARRAY, parser.currentToken(), parser); + + if (parser.nextToken() == Token.END_ARRAY) { + return List.of(); + } + + final ArrayList list = new ArrayList<>(); + + int index = 0; + do { + list.add(valueParser.apply(parser, index++)); + } while (parser.nextToken() != Token.END_ARRAY); + + return list; + } } diff --git a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java index fcf50ba3a8a44..5e515291d1fea 100644 --- a/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java +++ b/server/src/main/java/org/elasticsearch/gateway/PersistedClusterStateService.java @@ -61,6 +61,7 @@ import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Assertions; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; @@ -668,8 +669,7 @@ public OnDiskStateMetadata loadOnDiskStateMetadataFromUserData(Map T readXContent(BytesReference bytes, CheckedFunction reader) throws IOException { - final XContentParser parser = XContentFactory.xContent(XContentType.SMILE).createParser(parserConfig, bytes.streamInput()); - try { + try (XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, bytes, XContentType.SMILE)) { return reader.apply(parser); } catch (Exception e) { throw new CorruptStateException(e); diff --git a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java index 55b03ec1192c8..878930c2962d0 100644 --- a/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java +++ b/server/src/main/java/org/elasticsearch/health/HealthPeriodicLogger.java @@ -27,16 +27,24 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.gateway.GatewayService; import org.elasticsearch.health.node.selection.HealthNode; +import org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.metric.LongGaugeMetric; +import 
org.elasticsearch.telemetry.metric.MeterRegistry; import java.io.Closeable; import java.time.Clock; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.function.BiConsumer; +import java.util.function.Consumer; import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; /** * This class periodically logs the results of the Health API to the standard Elasticsearch server log file. @@ -45,6 +53,37 @@ public class HealthPeriodicLogger implements ClusterStateListener, Closeable, Sc public static final String HEALTH_FIELD_PREFIX = "elasticsearch.health"; public static final String MESSAGE_FIELD = "message"; + /** + * Valid modes of output for this logger + */ + public enum OutputMode { + LOGS("logs"), + METRICS("metrics"); + + private final String mode; + + OutputMode(String mode) { + this.mode = mode; + } + + public static OutputMode fromString(String mode) { + return valueOf(mode.toUpperCase(Locale.ROOT)); + } + + @Override + public String toString() { + return this.mode.toLowerCase(Locale.ROOT); + } + + static OutputMode parseOutputMode(String value) { + try { + return OutputMode.fromString(value); + } catch (Exception e) { + throw new IllegalArgumentException("Illegal OutputMode:" + value); + } + } + } + public static final Setting POLL_INTERVAL_SETTING = Setting.timeSetting( "health.periodic_logger.poll_interval", TimeValue.timeValueSeconds(60), @@ -60,6 +99,14 @@ public class HealthPeriodicLogger implements ClusterStateListener, Closeable, Sc Setting.Property.NodeScope ); + public static final Setting> OUTPUT_MODE_SETTING = Setting.listSetting( + "health.periodic_logger.output_mode", + List.of(OutputMode.LOGS.toString(), OutputMode.METRICS.toString()), + OutputMode::parseOutputMode, + Setting.Property.Dynamic, + Setting.Property.NodeScope + ); + /** * Name constant for the job HealthService schedules */ @@ -81,9 +128,18 @@ public class HealthPeriodicLogger implements ClusterStateListener, Closeable, Sc private final SetOnce scheduler = new SetOnce<>(); private volatile TimeValue pollInterval; private volatile boolean enabled; + private volatile Set outputModes; private static final Logger logger = LogManager.getLogger(HealthPeriodicLogger.class); + private final MeterRegistry meterRegistry; + private final Map redMetrics = new HashMap<>(); + + // Writers for logs or messages + // default visibility for testing purposes + private final BiConsumer metricWriter; + private final Consumer logWriter; + /** * Creates a new HealthPeriodicLogger. * This creates a scheduled job using the SchedulerEngine framework and runs it on the current health node. @@ -92,19 +148,49 @@ public class HealthPeriodicLogger implements ClusterStateListener, Closeable, Sc * @param clusterService the cluster service, used to know when the health node changes. * @param client the client used to call the Health Service. * @param healthService the Health Service, where the actual Health API logic lives. 
+ * @param telemetryProvider used to get the meter registry for metrics */ public static HealthPeriodicLogger create( Settings settings, ClusterService clusterService, Client client, - HealthService healthService + HealthService healthService, + TelemetryProvider telemetryProvider + ) { + return HealthPeriodicLogger.create(settings, clusterService, client, healthService, telemetryProvider, null, null); + } + + static HealthPeriodicLogger create( + Settings settings, + ClusterService clusterService, + Client client, + HealthService healthService, + TelemetryProvider telemetryProvider, + BiConsumer metricWriter, + Consumer logWriter ) { - HealthPeriodicLogger logger = new HealthPeriodicLogger(settings, clusterService, client, healthService); + HealthPeriodicLogger logger = new HealthPeriodicLogger( + settings, + clusterService, + client, + healthService, + telemetryProvider.getMeterRegistry(), + metricWriter, + logWriter + ); logger.registerListeners(); return logger; } - private HealthPeriodicLogger(Settings settings, ClusterService clusterService, Client client, HealthService healthService) { + private HealthPeriodicLogger( + Settings settings, + ClusterService clusterService, + Client client, + HealthService healthService, + MeterRegistry meterRegistry, + BiConsumer metricWriter, + Consumer logWriter + ) { this.settings = settings; this.clusterService = clusterService; this.client = client; @@ -112,6 +198,13 @@ private HealthPeriodicLogger(Settings settings, ClusterService clusterService, C this.clock = Clock.systemUTC(); this.pollInterval = POLL_INTERVAL_SETTING.get(settings); this.enabled = ENABLED_SETTING.get(settings); + this.outputModes = new HashSet<>(OUTPUT_MODE_SETTING.get(settings)); + this.meterRegistry = meterRegistry; + this.metricWriter = metricWriter == null ? LongGaugeMetric::set : metricWriter; + this.logWriter = logWriter == null ? logger::info : logWriter; + + // create metric for overall level metrics + this.redMetrics.put("overall", LongGaugeMetric.create(this.meterRegistry, "es.health.overall.red", "Overall: Red", "{cluster}")); } private void registerListeners() { @@ -120,6 +213,7 @@ private void registerListeners() { } clusterService.getClusterSettings().addSettingsUpdateConsumer(ENABLED_SETTING, this::enable); clusterService.getClusterSettings().addSettingsUpdateConsumer(POLL_INTERVAL_SETTING, this::updatePollInterval); + clusterService.getClusterSettings().addSettingsUpdateConsumer(OUTPUT_MODE_SETTING, this::updateOutputModes); } @Override @@ -186,7 +280,6 @@ SchedulerEngine getScheduler() { /** * Create a Map of the results, which is then turned into JSON for logging. - * * The structure looks like: * {"elasticsearch.health.overall.status": "green", "elasticsearch.health.[other indicators].status": "green"} * Only the indicator status values are included, along with the computed top-level status. 
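The metricWriter/logWriter pair introduced above is a testability seam: production code passes null and falls back to the real sinks (LongGaugeMetric::set and logger::info), while tests can inject capturing lambdas. A rough, self-contained sketch of that pattern under those assumptions (names are illustrative, not the Elasticsearch API, and the status merge is simplified to red/green):

import java.util.List;
import java.util.function.Consumer;

final class PeriodicReporterSketch {
    private final Consumer<String> logWriter;     // null in production -> default log sink
    private final Consumer<Long> redGaugeWriter;  // null in production -> default gauge sink

    PeriodicReporterSketch(Consumer<String> logWriter, Consumer<Long> redGaugeWriter) {
        this.logWriter = logWriter == null ? System.out::println : logWriter;
        this.redGaugeWriter = redGaugeWriter == null ? v -> {} : redGaugeWriter;
    }

    void report(List<String> indicatorStatuses, boolean logsEnabled, boolean metricsEnabled) {
        // simplified overall merge: the real code folds all statuses with HealthStatus.merge
        boolean anyRed = indicatorStatuses.stream().anyMatch("red"::equals);
        if (logsEnabled) {
            logWriter.accept("elasticsearch.health.overall.status=" + (anyRed ? "red" : "green"));
        }
        if (metricsEnabled) {
            redGaugeWriter.accept(anyRed ? 1L : 0L); // gauge reads 1 only while overall status is red
        }
    }

    public static void main(String[] args) {
        new PeriodicReporterSketch(null, null).report(List.of("green", "red"), true, true);
    }
}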
@@ -202,7 +295,7 @@ static Map convertToLoggedFields(List ind final Map result = new HashMap<>(); // overall status - final HealthStatus status = HealthStatus.merge(indicatorResults.stream().map(HealthIndicatorResult::status)); + final HealthStatus status = calculateOverallStatus(indicatorResults); result.put(String.format(Locale.ROOT, "%s.overall.status", HEALTH_FIELD_PREFIX), status.xContentValue()); // top-level status for each indicator @@ -228,6 +321,10 @@ static Map convertToLoggedFields(List ind return result; } + static HealthStatus calculateOverallStatus(List indicatorResults) { + return HealthStatus.merge(indicatorResults.stream().map(HealthIndicatorResult::status)); + } + /** * Handle the result of the Health Service getHealth call */ @@ -236,13 +333,21 @@ static Map convertToLoggedFields(List ind @Override public void onResponse(List healthIndicatorResults) { try { - Map resultsMap = convertToLoggedFields(healthIndicatorResults); + if (logsEnabled()) { + Map resultsMap = convertToLoggedFields(healthIndicatorResults); + + // if we have a valid response, log in JSON format + if (resultsMap.isEmpty() == false) { + ESLogMessage msg = new ESLogMessage().withFields(resultsMap); + logWriter.accept(msg); + } + } - // if we have a valid response, log in JSON format - if (resultsMap.isEmpty() == false) { - ESLogMessage msg = new ESLogMessage().withFields(resultsMap); - logger.info(msg); + // handle metrics + if (metricsEnabled()) { + writeMetrics(healthIndicatorResults); } + } catch (Exception e) { logger.warn("Health Periodic Logger error:{}", e.toString()); } @@ -254,6 +359,49 @@ public void onFailure(Exception e) { } }; + /** + * Write (and possibly create) the APM metrics + */ + // default visibility for testing purposes + void writeMetrics(List healthIndicatorResults) { + if (healthIndicatorResults != null) { + for (HealthIndicatorResult result : healthIndicatorResults) { + String metricName = result.name(); + LongGaugeMetric metric = this.redMetrics.get(metricName); + if (metric == null) { + metric = LongGaugeMetric.create( + this.meterRegistry, + String.format(Locale.ROOT, "es.health.%s.red", metricName), + String.format(Locale.ROOT, "%s: Red", metricName), + "{cluster}" + ); + this.redMetrics.put(metricName, metric); + } + metricWriter.accept(metric, result.status() == RED ? 1L : 0L); + } + + metricWriter.accept(this.redMetrics.get("overall"), calculateOverallStatus(healthIndicatorResults) == RED ? 
1L : 0L); + } + } + + private void updateOutputModes(List newMode) { + this.outputModes = new HashSet<>(newMode); + } + + /** + * Returns true if any of the outputModes are set to logs + */ + private boolean logsEnabled() { + return this.outputModes.contains(OutputMode.LOGS); + } + + /** + * Returns true if any of the outputModes are set to metrics + */ + private boolean metricsEnabled() { + return this.outputModes.contains(OutputMode.METRICS); + } + /** * Create the SchedulerEngine.Job if this node is the health node */ diff --git a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java index 24df7875f7e3d..f4dbf8115da33 100644 --- a/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java +++ b/server/src/main/java/org/elasticsearch/http/DefaultRestChannel.java @@ -26,7 +26,6 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.telemetry.tracing.SpanId; import org.elasticsearch.telemetry.tracing.Tracer; import java.util.ArrayList; @@ -91,8 +90,6 @@ public void sendResponse(RestResponse restResponse) { // We're sending a response so we know we won't be needing the request content again and release it httpRequest.release(); - final SpanId spanId = SpanId.forRestRequest(request); - final ArrayList toClose = new ArrayList<>(4); if (HttpUtils.shouldCloseConnection(httpRequest)) { toClose.add(() -> CloseableChannel.closeChannel(httpChannel)); @@ -174,9 +171,9 @@ public void sendResponse(RestResponse restResponse) { addCookies(httpResponse); - tracer.setAttribute(spanId, "http.status_code", restResponse.status().getStatus()); + tracer.setAttribute(request, "http.status_code", restResponse.status().getStatus()); restResponse.getHeaders() - .forEach((key, values) -> tracer.setAttribute(spanId, "http.response.headers." + key, String.join("; ", values))); + .forEach((key, values) -> tracer.setAttribute(request, "http.response.headers." 
+ key, String.join("; ", values))); ActionListener listener = ActionListener.releasing(Releasables.wrap(toClose)); if (httpLogger != null) { diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersion.java b/server/src/main/java/org/elasticsearch/index/IndexVersion.java index 765cc256d84b1..f4edb8b1d4039 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersion.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersion.java @@ -56,13 +56,11 @@ public record IndexVersion(int id, Version luceneVersion) implements VersionId null); - if (versionExtension == null) { - return IndexVersions.LATEST_DEFINED; - } - var version = versionExtension.getCurrentIndexVersion(IndexVersions.LATEST_DEFINED); + var version = ExtensionLoader.loadSingleton(ServiceLoader.load(VersionExtension.class)) + .map(e -> e.getCurrentIndexVersion(IndexVersions.LATEST_DEFINED)) + .orElse(IndexVersions.LATEST_DEFINED); assert version.onOrAfter(IndexVersions.LATEST_DEFINED); assert version.luceneVersion.equals(Version.LATEST) diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 125f9529c4165..4419abba73c1b 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -92,6 +92,10 @@ private static IndexVersion def(int id, Version luceneVersion) { public static final IndexVersion ES_VERSION_8_12 = def(8_500_004, Version.LUCENE_9_8_0); public static final IndexVersion NORMALIZED_VECTOR_COSINE = def(8_500_005, Version.LUCENE_9_8_0); public static final IndexVersion UPGRADE_LUCENE_9_9 = def(8_500_006, Version.LUCENE_9_9_0); + public static final IndexVersion NORI_DUPLICATES = def(8_500_007, Version.LUCENE_9_9_0); + public static final IndexVersion UPGRADE_LUCENE_9_9_1 = def(8_500_008, Version.LUCENE_9_9_1); + public static final IndexVersion ES_VERSION_8_13 = def(8_500_009, Version.LUCENE_9_9_1); + public static final IndexVersion NEW_INDEXVERSION_FORMAT = def(8_501_00_0, Version.LUCENE_9_9_1); /* * STOP! READ THIS FIRST! No, really, @@ -104,18 +108,46 @@ private static IndexVersion def(int id, Version luceneVersion) { * A new index version should be added EVERY TIME a change is made to index metadata or data storage. * Each index version should only be used in a single merged commit (apart from the BwC versions copied from o.e.Version, ≤V_8_11_0). * - * To add a new index version, add a new constant at the bottom of the list, above this comment, which is one greater than the - * current highest version id. Use a descriptive constant name. Don't add other lines, comments, etc. + * ADDING AN INDEX VERSION + * To add a new index version, add a new constant at the bottom of the list, above this comment. Don't add other lines, + * comments, etc. The version id has the following layout: + * + * M_NNN_SS_P + * + * M - The major version of Elasticsearch + * NNN - The server version part + * SS - The serverless version part. It should always be 00 here, it is used by serverless only. + * P - The patch version part + * + * To determine the id of the next IndexVersion constant, do the following: + * - Use the same major version, unless bumping majors + * - Bump the server version part by 1, unless creating a patch version + * - Leave the serverless part as 00 + * - Bump the patch part if creating a patch version + * + * If a patch version is created, it should be placed sorted among the other existing constants. 
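+ *
+ * As a worked example (illustrative, using the constant added above): 8_501_00_0 reads as major 8,
+ * server part 501, serverless part 00, patch 0. A patch release on top of it would be 8_501_00_1,
+ * while the next regular version would be 8_502_00_0.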
* * REVERTING AN INDEX VERSION * * If you revert a commit with an index version change, you MUST ensure there is a NEW index version representing the reverted * change. DO NOT let the index version go backwards, it must ALWAYS be incremented. * - * DETERMINING TRANSPORT VERSIONS FROM GIT HISTORY + * DETERMINING INDEX VERSIONS FROM GIT HISTORY + * + * If your git checkout has the expected minor-version-numbered branches and the expected release-version tags then you can find the + * index versions known by a particular release ... + * + * git show v8.12.0:server/src/main/java/org/elasticsearch/index/IndexVersions.java | grep '= def' + * + * ... or by a particular branch ... + * + * git show 8.12:server/src/main/java/org/elasticsearch/index/IndexVersions.java | grep '= def' + * + * ... and you can see which versions were added in between two versions too ... + * + * git diff v8.12.0..main -- server/src/main/java/org/elasticsearch/index/IndexVersions.java * - * TODO after the release of v8.11.0, copy the instructions about using git to track the history of versions from TransportVersion.java - * (the example commands won't make sense until at least 8.11.0 is released) + * In branches 8.7-8.11 see server/src/main/java/org/elasticsearch/index/IndexVersion.java for the equivalent definitions. */ public static final IndexVersion MINIMUM_COMPATIBLE = V_7_0_0; diff --git a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java index e19ee050c93a7..d3e281ca115e1 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/Analysis.java @@ -44,6 +44,7 @@ import org.apache.lucene.analysis.sv.SwedishAnalyzer; import org.apache.lucene.analysis.th.ThaiAnalyzer; import org.apache.lucene.analysis.tr.TurkishAnalyzer; +import org.apache.lucene.analysis.util.CSVUtil; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.logging.DeprecationCategory; @@ -64,6 +65,7 @@ import java.nio.file.Path; import java.util.ArrayList; import java.util.Collection; +import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; @@ -257,6 +259,52 @@ public static List getWordList( } } + public static List getWordList( + Environment env, + Settings settings, + String settingPath, + String settingList, + boolean removeComments, + boolean checkDuplicate + ) { + final List ruleList = getWordList(env, settings, settingPath, settingList, removeComments); + if (ruleList != null && ruleList.isEmpty() == false && checkDuplicate) { + checkDuplicateRules(ruleList); + } + return ruleList; + } + + /** + * This method checks for any duplicate rules in the provided ruleList. Each rule in the list is parsed with CSVUtil.parse + * to separate the rule into individual components, represented as a String array. Only the first component from each rule + * is considered in the duplication check. + * + * The method will ignore any line that starts with a '#' character, treating it as a comment. + * + * The check is performed by adding the first component of each rule into a HashSet (dup), which does not allow duplicates. + * If the addition to the HashSet returns false, it means that item was already present in the set, indicating a duplicate. + * In such a case, an IllegalArgumentException is thrown specifying the duplicate term and the line number in the original list. 
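+ *
+ * For example (illustrative), the rule list ["c++,c,plus", "c++,c,plusplus"] fails: both lines parse
+ * to the same first component "c++", so the second add returns false and the exception reports the
+ * duplicate term at line [1] (line numbers here are zero-based).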
+ * + * @param ruleList The list of rules to check for duplicates. + * @throws IllegalArgumentException If a duplicate rule is found. + */ + private static void checkDuplicateRules(List ruleList) { + Set dup = new HashSet<>(); + int lineNum = 0; + for (String line : ruleList) { + // ignore comments + if (line.startsWith("#") == false) { + String[] values = CSVUtil.parse(line); + if (dup.add(values[0]) == false) { + throw new IllegalArgumentException( + "Found duplicate term [" + values[0] + "] in user dictionary " + "at line [" + lineNum + "]" + ); + } + } + ++lineNum; + } + } + private static List loadWordList(Path path, boolean removeComments) throws IOException { final List result = new ArrayList<>(); try (BufferedReader br = Files.newBufferedReader(path, StandardCharsets.UTF_8)) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java index d2ca31fe6a197..852547ecb1073 100644 --- a/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java +++ b/server/src/main/java/org/elasticsearch/index/codec/PerFieldMapperCodec.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; import org.elasticsearch.index.codec.tsdb.ES87TSDBDocValuesFormat; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -44,6 +45,8 @@ public final class PerFieldMapperCodec extends Lucene99Codec { private final ES87BloomFilterPostingsFormat bloomFilterPostingsFormat; private final ES87TSDBDocValuesFormat tsdbDocValuesFormat; + private final ES812PostingsFormat es812PostingsFormat; + static { assert Codec.forName(Lucene.LATEST_CODEC).getClass().isAssignableFrom(PerFieldMapperCodec.class) : "PerFieldMapperCodec must subclass the latest lucene codec: " + Lucene.LATEST_CODEC; @@ -54,6 +57,7 @@ public PerFieldMapperCodec(Mode compressionMode, MapperService mapperService, Bi this.mapperService = mapperService; this.bloomFilterPostingsFormat = new ES87BloomFilterPostingsFormat(bigArrays, this::internalGetPostingsFormatForField); this.tsdbDocValuesFormat = new ES87TSDBDocValuesFormat(); + this.es812PostingsFormat = new ES812PostingsFormat(); } @Override @@ -69,7 +73,8 @@ private PostingsFormat internalGetPostingsFormatForField(String field) { if (format != null) { return format; } - return super.getPostingsFormatForField(field); + // return our own posting format using PFOR + return es812PostingsFormat; } boolean useBloomFilter(String field) { diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java new file mode 100644 index 0000000000000..5270326876e08 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsFormat.java @@ -0,0 +1,506 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.BlockTermState; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.FieldsConsumer; +import org.apache.lucene.codecs.FieldsProducer; +import org.apache.lucene.codecs.MultiLevelSkipListWriter; +import org.apache.lucene.codecs.PostingsFormat; +import org.apache.lucene.codecs.PostingsReaderBase; +import org.apache.lucene.codecs.PostingsWriterBase; +import org.apache.lucene.codecs.lucene90.blocktree.Lucene90BlockTreeTermsReader; +import org.apache.lucene.codecs.lucene90.blocktree.Lucene90BlockTreeTermsWriter; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.index.TermState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.packed.PackedInts; +import org.elasticsearch.core.IOUtils; + +import java.io.IOException; + +/** + * Based on Lucene 9.0 postings format, which encodes postings in packed integer blocks for fast decode. + * It is introduced to preserve PFOR space efficiency when Lucene switched back to FOR in 9.9 + * + *

Basic idea: + * + *

    + *
  • Packed Blocks and VInt Blocks: + *

    In packed blocks, integers are encoded with the same bit width ({@link PackedInts packed + * format}): the block size (i.e. number of integers inside block) is fixed (currently 128). + * Additionally blocks that are all the same value are encoded in an optimized way. + *

    In VInt blocks, integers are encoded as {@link DataOutput#writeVInt VInt}: the block + * size is variable. + *

  • Block structure: + *

    When the postings are long enough, Lucene90PostingsFormat will try to encode most + * integer data as a packed block. + *

    Take a term with 259 documents as an example, the first 256 document ids are encoded as + * two packed blocks, while the remaining 3 are encoded as one VInt block. + *

    Different kinds of data are always encoded separately into different packed blocks, but + * may possibly be interleaved into the same VInt block. + *

    This strategy is applied to pairs: <document number, frequency>, <position, + * payload length>, <position, offset start, offset length>, and <position, + * payload length, offsetstart, offset length>. + *

  • Skipdata settings: + *

    The structure of skip table is quite similar to previous version of Lucene. Skip + * interval is the same as block size, and each skip entry points to the beginning of each + * block. However, for the first block, skip data is omitted. + *

  • Positions, Payloads, and Offsets: + *

    A position is an integer indicating where the term occurs within one document. A payload + * is a blob of metadata associated with current position. An offset is a pair of integers + * indicating the tokenized start/end offsets for given term in current position: it is + * essentially a specialized payload. + *

    When payloads and offsets are not omitted, numPositions==numPayloads==numOffsets + * (assuming a null payload contributes one count). As mentioned in block structure, it is + * possible to encode these three either combined or separately. + *

    In all cases, payloads and offsets are stored together. When encoded as a packed block, + * position data is separated out as .pos, while payloads and offsets are encoded in .pay + * (payload metadata will also be stored directly in .pay). When encoded as VInt blocks, all + * these three are stored interleaved into the .pos (so is payload metadata). + *

    With this strategy, the majority of payload and offset data will be outside .pos file. + * So for queries that require only position data, running on a full index with payloads and + * offsets, this reduces disk pre-fetches. + *

+ * + *

Files and detailed format: + * + *

+ * + * + * + *
+ *
Term Dictionary + *

The .tim file contains the list of terms in each field along with per-term statistics + * (such as docfreq) and pointers to the frequencies, positions, payload and skip data in the + * .doc, .pos, and .pay files. See {@link Lucene90BlockTreeTermsWriter} for more details on + * the format. + *

NOTE: The term dictionary can plug into different postings implementations: the postings + * writer/reader are actually responsible for encoding and decoding the PostingsHeader and + * TermMetadata sections described here: + *

    + *
  • PostingsHeader --> Header, PackedBlockSize + *
  • TermMetadata --> (DocFPDelta|SingletonDocID), PosFPDelta?, PosVIntBlockFPDelta?, + * PayFPDelta?, SkipFPDelta? + *
  • Header, --> {@link CodecUtil#writeIndexHeader IndexHeader} + *
  • PackedBlockSize, SingletonDocID --> {@link DataOutput#writeVInt VInt} + *
  • DocFPDelta, PosFPDelta, PayFPDelta, PosVIntBlockFPDelta, SkipFPDelta --> {@link + * DataOutput#writeVLong VLong} + *
  • Footer --> {@link CodecUtil#writeFooter CodecFooter} + *
+ *

Notes: + *

    + *
  • Header is a {@link CodecUtil#writeIndexHeader IndexHeader} storing the version + * information for the postings. + *
  • PackedBlockSize is the fixed block size for packed blocks. In packed block, bit width + * is determined by the largest integer. Smaller block size result in smaller variance + * among width of integers hence smaller indexes. Larger block size result in more + * efficient bulk i/o hence better acceleration. This value should always be a multiple + * of 64, currently fixed as 128 as a tradeoff. It is also the skip interval used to + * accelerate {@link org.apache.lucene.index.PostingsEnum#advance(int)}. + *
  • DocFPDelta determines the position of this term's TermFreqs within the .doc file. In + * particular, it is the difference of file offset between this term's data and previous + * term's data (or zero, for the first term in the block).On disk it is stored as the + * difference from previous value in sequence. + *
  • PosFPDelta determines the position of this term's TermPositions within the .pos file. + * While PayFPDelta determines the position of this term's <TermPayloads, + * TermOffsets?> within the .pay file. Similar to DocFPDelta, it is the difference + * between two file positions (or neglected, for fields that omit payloads and offsets). + *
  • PosVIntBlockFPDelta determines the position of this term's last TermPosition in last + * pos packed block within the .pos file. It is synonym for PayVIntBlockFPDelta or + * OffsetVIntBlockFPDelta. This is actually used to indicate whether it is necessary to + * load following payloads and offsets from .pos instead of .pay. Every time a new block + * of positions are to be loaded, the PostingsReader will use this value to check + * whether current block is packed format or VInt. When packed format, payloads and + * offsets are fetched from .pay, otherwise from .pos. (this value is neglected when + * total number of positions i.e. totalTermFreq is less or equal to PackedBlockSize). + *
  • SkipFPDelta determines the position of this term's SkipData within the .doc file. In + * particular, it is the length of the TermFreq data. SkipDelta is only stored if + * DocFreq is not smaller than SkipMinimum (i.e. 128 in Lucene90PostingsFormat). + *
  • SingletonDocID is an optimization when a term only appears in one document. In this + * case, instead of writing a file pointer to the .doc file (DocFPDelta), and then a + * VIntBlock at that location, the single document ID is written to the term dictionary. + *
+ *
+ * + * + * + *
+ *
Term Index + *

The .tip file contains an index into the term dictionary, so that it can be accessed + * randomly. See {@link Lucene90BlockTreeTermsWriter} for more details on the format. + *

+ * + * + * + *
+ *
Frequencies and Skip Data + *

The .doc file contains the lists of documents which contain each term, along with the + * frequency of the term in that document (except when frequencies are omitted: {@link + * IndexOptions#DOCS}). It also saves skip data to the beginning of each packed or VInt block, + * when the length of document list is larger than packed block size. + *

    + *
  • docFile(.doc) --> Header, <TermFreqs, SkipData?>TermCount, Footer + *
  • Header --> {@link CodecUtil#writeIndexHeader IndexHeader} + *
  • TermFreqs --> <PackedBlock> PackedDocBlockNum, VIntBlock? + *
  • PackedBlock --> PackedDocDeltaBlock, PackedFreqBlock? + *
  • VIntBlock --> <DocDelta[, + * Freq?]>DocFreq-PackedBlockSize*PackedDocBlockNum + *
  • SkipData --> <<SkipLevelLength, SkipLevel> NumSkipLevels-1, + * SkipLevel>, SkipDatum? + *
  • SkipLevel --> <SkipDatum> TrimmedDocFreq/(PackedBlockSize^(Level + + * 1)) + *
  • SkipDatum --> DocSkip, DocFPSkip, <PosFPSkip, PosBlockOffset, PayLength?, + * PayFPSkip?>?, ImpactLength, <CompetitiveFreqDelta, CompetitiveNormDelta?> + * ImpactCount, SkipChildLevelPointer? + *
  • PackedDocDeltaBlock, PackedFreqBlock --> {@link PackedInts PackedInts} + *
  • DocDelta, Freq, DocSkip, DocFPSkip, PosFPSkip, PosBlockOffset, PayByteUpto, + * PayFPSkip, ImpactLength, CompetitiveFreqDelta --> {@link DataOutput#writeVInt + * VInt} + *
  • CompetitiveNormDelta --> {@link DataOutput#writeZLong ZLong} + *
  • SkipChildLevelPointer --> {@link DataOutput#writeVLong VLong} + *
  • Footer --> {@link CodecUtil#writeFooter CodecFooter} + *
+ *

Notes: + *

    + *
  • PackedDocDeltaBlock is conceptually generated in two steps: + *
      + *
    1. Calculate the difference between each document number and the previous one, giving + * a list of d-gaps (for the first document, use the absolute value); + *
    2. Encode those d-gaps, from the first one up to the + * PackedDocBlockNum*PackedBlockSizeth, separately as packed + * blocks. + *
    + * If frequencies are not omitted, PackedFreqBlock is generated the same way, but without the d-gap step. + *
  • VIntBlock stores the remaining d-gaps (along with frequencies when applicable) in a + * format that interleaves DocDelta and Freq: + *

    DocDelta: if frequencies are indexed, this determines both the document number and + * the frequency. In particular, DocDelta/2 is the difference between this document + * number and the previous document number (or zero when this is the first document in a + * TermFreqs). When DocDelta is odd, the frequency is one. When DocDelta is even, the + * frequency is read as another VInt. If frequencies are omitted, DocDelta contains the + * gap (not multiplied by 2) between document numbers and no frequency information is + * stored. See the sketch after this list for a decoding walkthrough. + *

    For example, the TermFreqs for a term which occurs once in document seven and + * three times in document eleven, with frequencies indexed, would be the following + * sequence of VInts: + *

    15, 8, 3 + *

    If frequencies were omitted ({@link IndexOptions#DOCS}) it would be this sequence + * of VInts instead: + *

    7, 4 + *

  • PackedDocBlockNum is the number of packed blocks for the current term's docids or + * frequencies. In particular, PackedDocBlockNum = floor(DocFreq/PackedBlockSize) + *
  • TrimmedDocFreq = DocFreq % PackedBlockSize == 0 ? DocFreq - 1 : DocFreq. We use this + * trick since the definition of a skip entry is a little different from the base interface. + * In {@link MultiLevelSkipListWriter}, skip data is assumed to be saved for the + * skipIntervalth, 2*skipIntervalth ... postings in the list. + * However, in Lucene90PostingsFormat, the skip data is saved for the + * skipInterval+1th, 2*skipInterval+1th ... postings + * (skipInterval==PackedBlockSize in this case). When DocFreq is a multiple of + * PackedBlockSize, MultiLevelSkipListWriter would expect one more skip entry than + * Lucene90SkipWriter writes. + *
  • SkipDatum is the metadata of one skip entry. For the first block (whether packed or + * VInt), it is omitted. + *
  • DocSkip records every PackedBlockSizeth document number in the postings + * (i.e. the last document number in each packed block). On disk it + * is stored as the difference from the previous value in the sequence. + *
  • DocFPSkip records the file offset of each packed block (excluding the first one), i.e. the + * offset of the posting at the PackedBlockSize+1th, 2*PackedBlockSize+1th ... positions, in the .doc file. + * The file offsets are relative to the start of the current term's TermFreqs. On disk they are + * also stored as the difference from the previous SkipDatum in the sequence. + *
  • Since positions and payloads are also block encoded, a skip must first move to the related + * block, then fetch the values according to the in-block offset. PosFPSkip and + * PayFPSkip record the file offsets of the related blocks in .pos and .pay, respectively, + * while PosBlockOffset indicates which value to fetch inside the related block + * (PayBlockOffset is unnecessary since it is always equal to PosBlockOffset). As with + * DocFPSkip, the file offsets are relative to the start of the current term's TermFreqs, + * and stored as a difference sequence. + *
  • PayByteUpto indicates the start offset of the current payload. It is equivalent to + * the sum of the payload lengths in the current block up to PosBlockOffset. + *
  • ImpactLength is the total length of the CompetitiveFreqDelta and CompetitiveNormDelta + * pairs. CompetitiveFreqDelta and CompetitiveNormDelta are used to safely skip score + * calculation for uncompetitive documents; see {@link + * org.apache.lucene.codecs.CompetitiveImpactAccumulator} for more details. + *
+ *
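+ * <p>A minimal decoding sketch for the DocDelta/Freq rule above (assumptions: a plain int[]
+ * stands in for the VInt stream, and the helper name is hypothetical). It reproduces the
+ * worked example, where 15, 8, 3 decodes to document 7 with frequency 1 and document 11 with
+ * frequency 3:
+ *
+ * <pre>
+ * static void printDocsAndFreqs(int[] vints) {
+ *   int doc = 0;
+ *   int i = 0;
+ *   while (i < vints.length) {
+ *     int code = vints[i++];
+ *     doc += code >>> 1; // DocDelta/2 is the gap to the previous document number
+ *     int freq = (code & 1) != 0 ? 1 : vints[i++]; // odd => freq is 1, even => freq follows
+ *     System.out.println("doc=" + doc + " freq=" + freq);
+ *   }
+ * }
+ * // printDocsAndFreqs(new int[] {15, 8, 3}) prints "doc=7 freq=1" then "doc=11 freq=3"
+ * </pre>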
+ * + * + * + *
+ *
Positions + *

The .pos file contains the lists of positions that each term occurs at within documents. + * It also sometimes stores parts of the payloads and offsets, for speedup. + *

    + *
  • PosFile(.pos) --> Header, <TermPositions> TermCount, Footer + *
  • Header --> {@link CodecUtil#writeIndexHeader IndexHeader} + *
  • TermPositions --> <PackedPosDeltaBlock> PackedPosBlockNum, + * VIntBlock? + *
  • VIntBlock --> <PositionDelta[, PayloadLength?], PayloadData?, OffsetDelta?, + * OffsetLength?>PosVIntCount + *
  • PackedPosDeltaBlock --> {@link PackedInts PackedInts} + *
  • PositionDelta, OffsetDelta, OffsetLength --> {@link DataOutput#writeVInt VInt} + *
  • PayloadData --> {@link DataOutput#writeByte byte}PayLength + *
  • Footer --> {@link CodecUtil#writeFooter CodecFooter} + *
+ *

Notes: + *

    + *
  • TermPositions are ordered by term (terms are implicit, from the term dictionary), and + * position values for each term-document pair are incremental, and ordered by document + * number. + *
  • PackedPosBlockNum is the number of packed blocks for the current term's positions, + * payloads or offsets. In particular, PackedPosBlockNum = + * floor(totalTermFreq/PackedBlockSize) + *
  • PosVIntCount is the number of positions encoded in VInt format. In particular, + * PosVIntCount = totalTermFreq - PackedPosBlockNum*PackedBlockSize + *
  • The procedure by which PackedPosDeltaBlock is generated is the same as for + * PackedDocDeltaBlock in the chapter Frequencies and Skip Data. + *
  • PositionDelta is, if payloads are disabled for the term's field, the difference + * between the position of the current occurrence in the document and the previous + * occurrence (or zero, if this is the first occurrence in this document). If payloads + * are enabled for the term's field, then PositionDelta/2 is the difference between the + * current and the previous position. If payloads are enabled and PositionDelta is odd, + * then PayloadLength is stored, indicating the length of the payload at the current + * term position. See the sketch after this list for a decoding walkthrough. + *
  • For example, the TermPositions for a term which occurs as the fourth term in one + * document, and as the fifth and ninth term in a subsequent document, would be the + * following sequence of VInts (payloads disabled): + *

    4, 5, 4 + *

  • PayloadData is metadata associated with the current term position. If PayloadLength + * is stored at the current position, then it indicates the length of this payload. If + * PayloadLength is not stored, then this payload has the same length as the payload at + * the previous position. + *
  • OffsetDelta/2 is the difference between this position's startOffset and the previous + * occurrence's startOffset (or zero, if this is the first occurrence in this document). If + * OffsetDelta is odd, then the length (endOffset-startOffset) differs from the previous + * occurrence and an OffsetLength follows. Offset data is only written for {@link + * IndexOptions#DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS}. + *
+ *
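+ * <p>A minimal decoding sketch for the PositionDelta rule above when payloads are enabled
+ * (again, a plain int[] stands in for the VInt stream and the helper name is hypothetical;
+ * the payload bytes that follow each length in the real stream are skipped here):
+ *
+ * <pre>
+ * static void printPositions(int[] vints) {
+ *   int pos = 0;
+ *   int payloadLength = 0; // carried over from the previous position when unchanged
+ *   int i = 0;
+ *   while (i < vints.length) {
+ *     int code = vints[i++];
+ *     pos += code >>> 1; // PositionDelta/2 is the gap to the previous position
+ *     if ((code & 1) != 0) { // odd => a new PayloadLength follows
+ *       payloadLength = vints[i++];
+ *     }
+ *     System.out.println("pos=" + pos + " payloadLength=" + payloadLength);
+ *   }
+ * }
+ * </pre>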
+ * + * + * + *
+ *
Payloads and Offsets + *

The .pay file stores payloads and offsets associated with certain term-document + * positions. Some payloads and offsets are separated out into the .pos file, for performance + * reasons. + *

    + *
  • PayFile(.pay) --> Header, <TermPayloads?, TermOffsets?> + * TermCount, Footer + *
  • Header --> {@link CodecUtil#writeIndexHeader IndexHeader} + *
  • TermPayloads --> <PackedPayLengthBlock, SumPayLength, PayData> + * PackedPayBlockNum + *
  • TermOffsets --> <PackedOffsetStartDeltaBlock, PackedOffsetLengthBlock> + * PackedPayBlockNum + *
  • PackedPayLengthBlock, PackedOffsetStartDeltaBlock, PackedOffsetLengthBlock --> + * {@link PackedInts PackedInts} + *
  • SumPayLength --> {@link DataOutput#writeVInt VInt} + *
  • PayData --> {@link DataOutput#writeByte byte}SumPayLength + *
  • Footer --> {@link CodecUtil#writeFooter CodecFooter} + *
+ *

Notes: + *

    + *
  • The order of TermPayloads/TermOffsets is the same as TermPositions; note that + * some payloads/offsets are stored in .pos. + *
  • The procedure by which PackedPayLengthBlock and PackedOffsetLengthBlock are generated is + * the same as for PackedFreqBlock in the chapter Frequencies and Skip + * Data, while PackedOffsetStartDeltaBlock follows the same procedure as + * PackedDocDeltaBlock. + *
  • PackedPayBlockNum is always equal to PackedPosBlockNum, for the same term. It is also + * a synonym for PackedOffsetBlockNum. + *
  • SumPayLength is the total length of the payloads written within one block; it should equal + * the sum of the PayLengths in one packed block (see the sketch after this list). + *
  • PayLength in PackedPayLengthBlock is the length of each payload associated with the + * current position. + *
+ *
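+ * <p>A minimal sketch of the SumPayLength invariant above (a hypothetical helper over an
+ * in-memory array, not the writer's actual API): the PayLengths of one packed block are
+ * summed, and PayData then carries exactly that many bytes:
+ *
+ * <pre>
+ * static int sumPayLength(long[] payLengthBlock) {
+ *   int sum = 0; // becomes SumPayLength for this packed block
+ *   for (long len : payLengthBlock) {
+ *     sum += (int) len; // each entry is the PayLength of one position in the block
+ *   }
+ *   return sum;
+ * }
+ * </pre>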
+ * + */ +public final class ES812PostingsFormat extends PostingsFormat { + + /** + * Filename extension for document number, frequencies, and skip data. See chapter: Frequencies and Skip Data + */ + public static final String DOC_EXTENSION = "doc"; + + /** Filename extension for positions. See chapter: Positions */ + public static final String POS_EXTENSION = "pos"; + + /** + * Filename extension for payloads and offsets. See chapter: Payloads and + * Offsets + */ + public static final String PAY_EXTENSION = "pay"; + + /** Size of blocks. */ + public static final int BLOCK_SIZE = ForUtil.BLOCK_SIZE; + + /** + * Expert: The maximum number of skip levels. Smaller values result in slightly smaller indexes, + * but slower skipping in big posting lists. + */ + static final int MAX_SKIP_LEVELS = 10; + + static final String CODEC_NAME = "ES812Postings"; + static final String TERMS_CODEC = "ES812PostingsWriterTerms"; + static final String DOC_CODEC = "ES812PostingsWriterDoc"; + static final String POS_CODEC = "ES812PostingsWriterPos"; + static final String PAY_CODEC = "ES812PostingsWriterPay"; + + // Increment version to change it + static final int VERSION_START = 0; + static final int VERSION_CURRENT = VERSION_START; + + /** Creates read-only {@code ES812PostingsFormat}. */ + public ES812PostingsFormat() { + super(CODEC_NAME); + } + + @Override + public String toString() { + return getName(); + } + + @Override + public FieldsConsumer fieldsConsumer(SegmentWriteState state) throws IOException { + PostingsWriterBase postingsWriter = new ES812PostingsWriter(state); + boolean success = false; + try { + FieldsConsumer ret = new Lucene90BlockTreeTermsWriter( + state, + postingsWriter, + Lucene90BlockTreeTermsWriter.DEFAULT_MIN_BLOCK_SIZE, + Lucene90BlockTreeTermsWriter.DEFAULT_MAX_BLOCK_SIZE + ); + success = true; + return ret; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(postingsWriter); + } + } + } + + @Override + public FieldsProducer fieldsProducer(SegmentReadState state) throws IOException { + PostingsReaderBase postingsReader = new ES812PostingsReader(state); + boolean success = false; + try { + FieldsProducer ret = new Lucene90BlockTreeTermsReader(postingsReader, state); + success = true; + return ret; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(postingsReader); + } + } + } + + /** + * Holds all state required for {@link ES812PostingsReader} to produce a {@link + * org.apache.lucene.index.PostingsEnum} without re-seeking the terms dict. + * + */ + public static final class IntBlockTermState extends BlockTermState { + /** file pointer to the start of the doc ids enumeration, in {@link #DOC_EXTENSION} file */ + public long docStartFP; + + /** file pointer to the start of the positions enumeration, in {@link #POS_EXTENSION} file */ + public long posStartFP; + + /** file pointer to the start of the payloads enumeration, in {@link #PAY_EXTENSION} file */ + public long payStartFP; + + /** + * file offset for the start of the skip list, relative to docStartFP, if there are more than + * {@link ForUtil#BLOCK_SIZE} docs; otherwise -1 + */ + public long skipOffset; + + /** + * file offset for the last position in the last block, if there are more than {@link + * ForUtil#BLOCK_SIZE} positions; otherwise -1 + * + *

One might think to use total term frequency to track how many positions are left to read + * as we decode the blocks, and decode the last block differently when num_left_positions < + * BLOCK_SIZE. Unfortunately this won't work since the tracking will be messed up when we skip + * blocks as the skipper will only tell us new position offset (start of block) and number of + * positions to skip for that block, without telling us how many positions it has skipped. + */ + public long lastPosBlockOffset; + + /** + * docid when there is a single pulsed posting, otherwise -1. freq is always implicitly + * totalTermFreq in this case. + */ + public int singletonDocID; + + /** Sole constructor. */ + public IntBlockTermState() { + skipOffset = -1; + lastPosBlockOffset = -1; + singletonDocID = -1; + } + + @Override + public IntBlockTermState clone() { + IntBlockTermState other = new IntBlockTermState(); + other.copyFrom(this); + return other; + } + + @Override + public void copyFrom(TermState _other) { + super.copyFrom(_other); + IntBlockTermState other = (IntBlockTermState) _other; + docStartFP = other.docStartFP; + posStartFP = other.posStartFP; + payStartFP = other.payStartFP; + lastPosBlockOffset = other.lastPosBlockOffset; + skipOffset = other.skipOffset; + singletonDocID = other.singletonDocID; + } + + @Override + public String toString() { + return super.toString() + + " docStartFP=" + + docStartFP + + " posStartFP=" + + posStartFP + + " payStartFP=" + + payStartFP + + " lastPosBlockOffset=" + + lastPosBlockOffset + + " singletonDocID=" + + singletonDocID; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java new file mode 100644 index 0000000000000..8b3d5d02a04c0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsReader.java @@ -0,0 +1,1990 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.BlockTermState; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.PostingsReaderBase; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.Impacts; +import org.apache.lucene.index.ImpactsEnum; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexOptions; +import org.apache.lucene.index.PostingsEnum; +import org.apache.lucene.index.SegmentReadState; +import org.apache.lucene.index.SlowImpactsEnum; +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BitUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; + +import java.io.IOException; +import java.util.Arrays; + +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.DOC_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.MAX_SKIP_LEVELS; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.PAY_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.POS_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.TERMS_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_CURRENT; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_START; +import static org.elasticsearch.index.codec.postings.ForUtil.BLOCK_SIZE; + +/** + * Concrete class that reads docId(maybe frq,pos,offset,payloads) list with postings format. + * + */ +final class ES812PostingsReader extends PostingsReaderBase { + + private final IndexInput docIn; + private final IndexInput posIn; + private final IndexInput payIn; + + private final int version; + + /** Sole constructor. */ + ES812PostingsReader(SegmentReadState state) throws IOException { + boolean success = false; + IndexInput docIn = null; + IndexInput posIn = null; + IndexInput payIn = null; + + // NOTE: these data files are too costly to verify checksum against all the bytes on open, + // but for now we at least verify proper structure of the checksum footer: which looks + // for FOOTER_MAGIC + algorithmID. This is cheap and can detect some forms of corruption + // such as file truncation. 
+ + String docName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, ES812PostingsFormat.DOC_EXTENSION); + try { + docIn = state.directory.openInput(docName, state.context); + version = CodecUtil.checkIndexHeader( + docIn, + DOC_CODEC, + VERSION_START, + VERSION_CURRENT, + state.segmentInfo.getId(), + state.segmentSuffix + ); + CodecUtil.retrieveChecksum(docIn); + + if (state.fieldInfos.hasProx()) { + String proxName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES812PostingsFormat.POS_EXTENSION + ); + posIn = state.directory.openInput(proxName, state.context); + CodecUtil.checkIndexHeader(posIn, POS_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix); + CodecUtil.retrieveChecksum(posIn); + + if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) { + String payName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES812PostingsFormat.PAY_EXTENSION + ); + payIn = state.directory.openInput(payName, state.context); + CodecUtil.checkIndexHeader(payIn, PAY_CODEC, version, version, state.segmentInfo.getId(), state.segmentSuffix); + CodecUtil.retrieveChecksum(payIn); + } + } + + this.docIn = docIn; + this.posIn = posIn; + this.payIn = payIn; + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(docIn, posIn, payIn); + } + } + } + + @Override + public void init(IndexInput termsIn, SegmentReadState state) throws IOException { + // Make sure we are talking to the matching postings writer + CodecUtil.checkIndexHeader(termsIn, TERMS_CODEC, VERSION_START, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); + final int indexBlockSize = termsIn.readVInt(); + if (indexBlockSize != BLOCK_SIZE) { + throw new IllegalStateException("index-time BLOCK_SIZE (" + indexBlockSize + ") != read-time BLOCK_SIZE (" + BLOCK_SIZE + ")"); + } + } + + /** Read values that have been written using variable-length encoding instead of bit-packing. 
*/ + static void readVIntBlock(IndexInput docIn, long[] docBuffer, long[] freqBuffer, int num, boolean indexHasFreq) throws IOException { + if (indexHasFreq) { + for (int i = 0; i < num; i++) { + final int code = docIn.readVInt(); + docBuffer[i] = code >>> 1; + if ((code & 1) != 0) { + freqBuffer[i] = 1; + } else { + freqBuffer[i] = docIn.readVInt(); + } + } + } else { + for (int i = 0; i < num; i++) { + docBuffer[i] = docIn.readVInt(); + } + } + } + + static void prefixSum(long[] buffer, int count, long base) { + buffer[0] += base; + for (int i = 1; i < count; ++i) { + buffer[i] += buffer[i - 1]; + } + } + + static int findFirstGreater(long[] buffer, int target, int from) { + for (int i = from; i < BLOCK_SIZE; ++i) { + if (buffer[i] >= target) { + return i; + } + } + return BLOCK_SIZE; + } + + @Override + public BlockTermState newTermState() { + return new IntBlockTermState(); + } + + @Override + public void close() throws IOException { + IOUtils.close(docIn, posIn, payIn); + } + + @Override + public void decodeTerm(DataInput in, FieldInfo fieldInfo, BlockTermState _termState, boolean absolute) throws IOException { + final IntBlockTermState termState = (IntBlockTermState) _termState; + final boolean fieldHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + final boolean fieldHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + final boolean fieldHasPayloads = fieldInfo.hasPayloads(); + + if (absolute) { + termState.docStartFP = 0; + termState.posStartFP = 0; + termState.payStartFP = 0; + } + + final long l = in.readVLong(); + if ((l & 0x01) == 0) { + termState.docStartFP += l >>> 1; + if (termState.docFreq == 1) { + termState.singletonDocID = in.readVInt(); + } else { + termState.singletonDocID = -1; + } + } else { + assert absolute == false; + assert termState.singletonDocID != -1; + termState.singletonDocID += (int) BitUtil.zigZagDecode(l >>> 1); + } + + if (fieldHasPositions) { + termState.posStartFP += in.readVLong(); + if (fieldHasOffsets || fieldHasPayloads) { + termState.payStartFP += in.readVLong(); + } + if (termState.totalTermFreq > BLOCK_SIZE) { + termState.lastPosBlockOffset = in.readVLong(); + } else { + termState.lastPosBlockOffset = -1; + } + } + + if (termState.docFreq > BLOCK_SIZE) { + termState.skipOffset = in.readVLong(); + } else { + termState.skipOffset = -1; + } + } + + @Override + public PostingsEnum postings(FieldInfo fieldInfo, BlockTermState termState, PostingsEnum reuse, int flags) throws IOException { + + boolean indexHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + + if (indexHasPositions == false || PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) == false) { + BlockDocsEnum docsEnum; + if (reuse instanceof BlockDocsEnum) { + docsEnum = (BlockDocsEnum) reuse; + if (docsEnum.canReuse(docIn, fieldInfo) == false) { + docsEnum = new BlockDocsEnum(fieldInfo); + } + } else { + docsEnum = new BlockDocsEnum(fieldInfo); + } + return docsEnum.reset((IntBlockTermState) termState, flags); + } else { + EverythingEnum everythingEnum; + if (reuse instanceof EverythingEnum) { + everythingEnum = (EverythingEnum) reuse; + if (everythingEnum.canReuse(docIn, fieldInfo) == false) { + everythingEnum = new EverythingEnum(fieldInfo); + } + } else { + everythingEnum = new EverythingEnum(fieldInfo); + } + return everythingEnum.reset((IntBlockTermState) termState, flags); + } + } + + @Override + public 
ImpactsEnum impacts(FieldInfo fieldInfo, BlockTermState state, int flags) throws IOException { + if (state.docFreq <= BLOCK_SIZE) { + // no skip data + return new SlowImpactsEnum(postings(fieldInfo, state, null, flags)); + } + + final boolean indexHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + final boolean indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + final boolean indexHasPayloads = fieldInfo.hasPayloads(); + + if (indexHasPositions == false || PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) == false) { + return new BlockImpactsDocsEnum(fieldInfo, (IntBlockTermState) state); + } + + if (indexHasPositions + && PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS) + && (indexHasOffsets == false || PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS) == false) + && (indexHasPayloads == false || PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS) == false)) { + return new BlockImpactsPostingsEnum(fieldInfo, (IntBlockTermState) state); + } + + return new BlockImpactsEverythingEnum(fieldInfo, (IntBlockTermState) state, flags); + } + + final class BlockDocsEnum extends PostingsEnum { + + final PForUtil pforUtil = new PForUtil(new ForUtil()); + + private final long[] docBuffer = new long[BLOCK_SIZE + 1]; + private final long[] freqBuffer = new long[BLOCK_SIZE]; + + private int docBufferUpto; + + private ES812SkipReader skipper; + private boolean skipped; + + final IndexInput startDocIn; + + IndexInput docIn; + final boolean indexHasFreq; + final boolean indexHasPos; + final boolean indexHasOffsets; + final boolean indexHasPayloads; + + private int docFreq; // number of docs in this posting list + private long totalTermFreq; // sum of freqBuffer in this posting list (or docFreq when omitted) + private int blockUpto; // number of docs in or before the current block + private int doc; // doc we last read + private long accum; // accumulator for doc deltas + + // Where this term's postings start in the .doc file: + private long docTermStartFP; + + // Where this term's skip data starts (after + // docTermStartFP) in the .doc file (or -1 if there is + // no skip data for this term): + private long skipOffset; + + // docID for next skip point, we won't use skipper if + // target docID is not larger than this + private int nextSkipDoc; + + private boolean needsFreq; // true if the caller actually needs frequencies + // as we read freqBuffer lazily, isFreqsRead shows if freqBuffer are read for the current block + // always true when we don't have freqBuffer (indexHasFreq=false) or don't need freqBuffer + // (needsFreq=false) + private boolean isFreqsRead; + private int singletonDocID; // docid when there is a single pulsed posting, otherwise -1 + + BlockDocsEnum(FieldInfo fieldInfo) throws IOException { + this.startDocIn = ES812PostingsReader.this.docIn; + this.docIn = null; + indexHasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; + indexHasPos = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + indexHasPayloads = fieldInfo.hasPayloads(); + // We set the last element of docBuffer to NO_MORE_DOCS, it helps save conditionals in + // advance() + docBuffer[BLOCK_SIZE] = NO_MORE_DOCS; + } + + public boolean canReuse(IndexInput docIn, FieldInfo fieldInfo) { + 
return docIn == startDocIn + && indexHasFreq == (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0) + && indexHasPos == (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0) + && indexHasPayloads == fieldInfo.hasPayloads(); + } + + public PostingsEnum reset(IntBlockTermState termState, int flags) throws IOException { + docFreq = termState.docFreq; + totalTermFreq = indexHasFreq ? termState.totalTermFreq : docFreq; + docTermStartFP = termState.docStartFP; + skipOffset = termState.skipOffset; + singletonDocID = termState.singletonDocID; + if (docFreq > 1) { + if (docIn == null) { + // lazy init + docIn = startDocIn.clone(); + } + docIn.seek(docTermStartFP); + } + + doc = -1; + this.needsFreq = PostingsEnum.featureRequested(flags, PostingsEnum.FREQS); + this.isFreqsRead = true; + if (indexHasFreq == false || needsFreq == false) { + for (int i = 0; i < ForUtil.BLOCK_SIZE; ++i) { + freqBuffer[i] = 1; + } + } + accum = 0; + blockUpto = 0; + nextSkipDoc = BLOCK_SIZE - 1; // we won't skip if target is found in first block + docBufferUpto = BLOCK_SIZE; + skipped = false; + return this; + } + + @Override + public int freq() throws IOException { + if (isFreqsRead == false) { + pforUtil.decode(docIn, freqBuffer); // read freqBuffer for this block + isFreqsRead = true; + } + return (int) freqBuffer[docBufferUpto - 1]; + } + + @Override + public int nextPosition() throws IOException { + return -1; + } + + @Override + public int startOffset() throws IOException { + return -1; + } + + @Override + public int endOffset() throws IOException { + return -1; + } + + @Override + public BytesRef getPayload() throws IOException { + return null; + } + + @Override + public int docID() { + return doc; + } + + private void refillDocs() throws IOException { + // Check if we skipped reading the previous block of freqBuffer, and if yes, position docIn + // after it + if (isFreqsRead == false) { + pforUtil.skip(docIn); + isFreqsRead = true; + } + + final int left = docFreq - blockUpto; + assert left >= 0; + + if (left >= BLOCK_SIZE) { + pforUtil.decodeAndPrefixSum(docIn, accum, docBuffer); + + if (indexHasFreq) { + if (needsFreq) { + isFreqsRead = false; + } else { + pforUtil.skip(docIn); // skip over freqBuffer if we don't need them at all + } + } + blockUpto += BLOCK_SIZE; + } else if (docFreq == 1) { + docBuffer[0] = singletonDocID; + freqBuffer[0] = totalTermFreq; + docBuffer[1] = NO_MORE_DOCS; + blockUpto++; + } else { + // Read vInts: + readVIntBlock(docIn, docBuffer, freqBuffer, left, indexHasFreq); + prefixSum(docBuffer, left, accum); + docBuffer[left] = NO_MORE_DOCS; + blockUpto += left; + } + accum = docBuffer[BLOCK_SIZE - 1]; + docBufferUpto = 0; + assert docBuffer[BLOCK_SIZE] == NO_MORE_DOCS; + } + + @Override + public int nextDoc() throws IOException { + if (docBufferUpto == BLOCK_SIZE) { + refillDocs(); // we don't need to load freqBuffer for now (will be loaded later if + // necessary) + } + + doc = (int) docBuffer[docBufferUpto]; + docBufferUpto++; + return doc; + } + + @Override + public int advance(int target) throws IOException { + // current skip docID < docIDs generated from current buffer <= next skip docID + // we don't need to skip if target is buffered already + if (docFreq > BLOCK_SIZE && target > nextSkipDoc) { + + if (skipper == null) { + // Lazy init: first time this enum has ever been used for skipping + skipper = new ES812SkipReader(docIn.clone(), MAX_SKIP_LEVELS, indexHasPos, indexHasOffsets, indexHasPayloads); + } + + if (skipped == 
false) { + assert skipOffset != -1; + // This is the first time this enum has skipped + // since reset() was called; load the skip data: + skipper.init(docTermStartFP + skipOffset, docTermStartFP, 0, 0, docFreq); + skipped = true; + } + + // always plus one to fix the result, since skip position in Lucene90SkipReader + // is a little different from MultiLevelSkipListReader + final int newDocUpto = skipper.skipTo(target) + 1; + + if (newDocUpto >= blockUpto) { + // Skipper moved + assert newDocUpto % BLOCK_SIZE == 0 : "got " + newDocUpto; + blockUpto = newDocUpto; + + // Force to read next block + docBufferUpto = BLOCK_SIZE; + accum = skipper.getDoc(); // actually, this is just lastSkipEntry + docIn.seek(skipper.getDocPointer()); // now point to the block we want to search + // even if freqBuffer were not read from the previous block, we will mark them as read, + // as we don't need to skip the previous block freqBuffer in refillDocs, + // as we have already positioned docIn where in needs to be. + isFreqsRead = true; + } + // next time we call advance, this is used to + // foresee whether skipper is necessary. + nextSkipDoc = skipper.getNextSkipDoc(); + } + if (docBufferUpto == BLOCK_SIZE) { + refillDocs(); + } + + // Now scan... this is an inlined/pared down version + // of nextDoc(): + long doc; + while (true) { + doc = docBuffer[docBufferUpto]; + + if (doc >= target) { + break; + } + ++docBufferUpto; + } + + docBufferUpto++; + return this.doc = (int) doc; + } + + @Override + public long cost() { + return docFreq; + } + } + + // Also handles payloads + offsets + final class EverythingEnum extends PostingsEnum { + + final PForUtil pforUtil = new PForUtil(new ForUtil()); + + private final long[] docBuffer = new long[BLOCK_SIZE + 1]; + private final long[] freqBuffer = new long[BLOCK_SIZE + 1]; + private final long[] posDeltaBuffer = new long[BLOCK_SIZE]; + + private final long[] payloadLengthBuffer; + private final long[] offsetStartDeltaBuffer; + private final long[] offsetLengthBuffer; + + private byte[] payloadBytes; + private int payloadByteUpto; + private int payloadLength; + + private int lastStartOffset; + private int startOffset; + private int endOffset; + + private int docBufferUpto; + private int posBufferUpto; + + private ES812SkipReader skipper; + private boolean skipped; + + final IndexInput startDocIn; + + IndexInput docIn; + final IndexInput posIn; + final IndexInput payIn; + final BytesRef payload; + + final boolean indexHasOffsets; + final boolean indexHasPayloads; + + private int docFreq; // number of docs in this posting list + private long totalTermFreq; // number of positions in this posting list + private int blockUpto; // number of docs in or before the current block + private int doc; // doc we last read + private long accum; // accumulator for doc deltas + private int freq; // freq we last read + private int position; // current position + + // how many positions "behind" we are; nextPosition must + // skip these to "catch up": + private int posPendingCount; + + // Lazy pos seek: if != -1 then we must seek to this FP + // before reading positions: + private long posPendingFP; + + // Lazy pay seek: if != -1 then we must seek to this FP + // before reading payloads/offsets: + private long payPendingFP; + + // Where this term's postings start in the .doc file: + private long docTermStartFP; + + // Where this term's postings start in the .pos file: + private long posTermStartFP; + + // Where this term's payloads/offsets start in the .pay + // file: + private long 
payTermStartFP; + + // File pointer where the last (vInt encoded) pos delta + // block is. We need this to know whether to bulk + // decode vs vInt decode the block: + private long lastPosBlockFP; + + // Where this term's skip data starts (after + // docTermStartFP) in the .doc file (or -1 if there is + // no skip data for this term): + private long skipOffset; + + private int nextSkipDoc; + + private boolean needsOffsets; // true if we actually need offsets + private boolean needsPayloads; // true if we actually need payloads + private int singletonDocID; // docid when there is a single pulsed posting, otherwise -1 + + EverythingEnum(FieldInfo fieldInfo) throws IOException { + indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + indexHasPayloads = fieldInfo.hasPayloads(); + + this.startDocIn = ES812PostingsReader.this.docIn; + this.docIn = null; + this.posIn = ES812PostingsReader.this.posIn.clone(); + if (indexHasOffsets || indexHasPayloads) { + this.payIn = ES812PostingsReader.this.payIn.clone(); + } else { + this.payIn = null; + } + if (indexHasOffsets) { + offsetStartDeltaBuffer = new long[BLOCK_SIZE]; + offsetLengthBuffer = new long[BLOCK_SIZE]; + } else { + offsetStartDeltaBuffer = null; + offsetLengthBuffer = null; + startOffset = -1; + endOffset = -1; + } + + if (indexHasPayloads) { + payloadLengthBuffer = new long[BLOCK_SIZE]; + payloadBytes = new byte[128]; + payload = new BytesRef(); + } else { + payloadLengthBuffer = null; + payloadBytes = null; + payload = null; + } + + // We set the last element of docBuffer to NO_MORE_DOCS, it helps save conditionals in + // advance() + docBuffer[BLOCK_SIZE] = NO_MORE_DOCS; + } + + public boolean canReuse(IndexInput docIn, FieldInfo fieldInfo) { + return docIn == startDocIn + && indexHasOffsets == (fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0) + && indexHasPayloads == fieldInfo.hasPayloads(); + } + + public EverythingEnum reset(IntBlockTermState termState, int flags) throws IOException { + docFreq = termState.docFreq; + docTermStartFP = termState.docStartFP; + posTermStartFP = termState.posStartFP; + payTermStartFP = termState.payStartFP; + skipOffset = termState.skipOffset; + totalTermFreq = termState.totalTermFreq; + singletonDocID = termState.singletonDocID; + if (docFreq > 1) { + if (docIn == null) { + // lazy init + docIn = startDocIn.clone(); + } + docIn.seek(docTermStartFP); + } + posPendingFP = posTermStartFP; + payPendingFP = payTermStartFP; + posPendingCount = 0; + if (termState.totalTermFreq < BLOCK_SIZE) { + lastPosBlockFP = posTermStartFP; + } else if (termState.totalTermFreq == BLOCK_SIZE) { + lastPosBlockFP = -1; + } else { + lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset; + } + + this.needsOffsets = PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS); + this.needsPayloads = PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS); + + doc = -1; + accum = 0; + blockUpto = 0; + if (docFreq > BLOCK_SIZE) { + nextSkipDoc = BLOCK_SIZE - 1; // we won't skip if target is found in first block + } else { + nextSkipDoc = NO_MORE_DOCS; // not enough docs for skipping + } + docBufferUpto = BLOCK_SIZE; + skipped = false; + return this; + } + + @Override + public int freq() throws IOException { + return freq; + } + + @Override + public int docID() { + return doc; + } + + private void refillDocs() throws IOException { + final int left = docFreq - blockUpto; + assert left >= 0; + + if (left >= 
BLOCK_SIZE) { + pforUtil.decodeAndPrefixSum(docIn, accum, docBuffer); + pforUtil.decode(docIn, freqBuffer); + blockUpto += BLOCK_SIZE; + } else if (docFreq == 1) { + docBuffer[0] = singletonDocID; + freqBuffer[0] = totalTermFreq; + docBuffer[1] = NO_MORE_DOCS; + blockUpto++; + } else { + readVIntBlock(docIn, docBuffer, freqBuffer, left, true); + prefixSum(docBuffer, left, accum); + docBuffer[left] = NO_MORE_DOCS; + blockUpto += left; + } + accum = docBuffer[BLOCK_SIZE - 1]; + docBufferUpto = 0; + assert docBuffer[BLOCK_SIZE] == NO_MORE_DOCS; + } + + private void refillPositions() throws IOException { + if (posIn.getFilePointer() == lastPosBlockFP) { + final int count = (int) (totalTermFreq % BLOCK_SIZE); + int payloadLength = 0; + int offsetLength = 0; + payloadByteUpto = 0; + for (int i = 0; i < count; i++) { + int code = posIn.readVInt(); + if (indexHasPayloads) { + if ((code & 1) != 0) { + payloadLength = posIn.readVInt(); + } + payloadLengthBuffer[i] = payloadLength; + posDeltaBuffer[i] = code >>> 1; + if (payloadLength != 0) { + if (payloadByteUpto + payloadLength > payloadBytes.length) { + payloadBytes = ArrayUtil.grow(payloadBytes, payloadByteUpto + payloadLength); + } + posIn.readBytes(payloadBytes, payloadByteUpto, payloadLength); + payloadByteUpto += payloadLength; + } + } else { + posDeltaBuffer[i] = code; + } + + if (indexHasOffsets) { + int deltaCode = posIn.readVInt(); + if ((deltaCode & 1) != 0) { + offsetLength = posIn.readVInt(); + } + offsetStartDeltaBuffer[i] = deltaCode >>> 1; + offsetLengthBuffer[i] = offsetLength; + } + } + payloadByteUpto = 0; + } else { + pforUtil.decode(posIn, posDeltaBuffer); + + if (indexHasPayloads) { + if (needsPayloads) { + pforUtil.decode(payIn, payloadLengthBuffer); + int numBytes = payIn.readVInt(); + + if (numBytes > payloadBytes.length) { + payloadBytes = ArrayUtil.growNoCopy(payloadBytes, numBytes); + } + payIn.readBytes(payloadBytes, 0, numBytes); + } else { + // this works, because when writing a vint block we always force the first length to be + // written + pforUtil.skip(payIn); // skip over lengths + int numBytes = payIn.readVInt(); // read length of payloadBytes + payIn.seek(payIn.getFilePointer() + numBytes); // skip over payloadBytes + } + payloadByteUpto = 0; + } + + if (indexHasOffsets) { + if (needsOffsets) { + pforUtil.decode(payIn, offsetStartDeltaBuffer); + pforUtil.decode(payIn, offsetLengthBuffer); + } else { + // this works, because when writing a vint block we always force the first length to be + // written + pforUtil.skip(payIn); // skip over starts + pforUtil.skip(payIn); // skip over lengths + } + } + } + } + + @Override + public int nextDoc() throws IOException { + if (docBufferUpto == BLOCK_SIZE) { + refillDocs(); + } + + doc = (int) docBuffer[docBufferUpto]; + freq = (int) freqBuffer[docBufferUpto]; + posPendingCount += freq; + docBufferUpto++; + + position = 0; + lastStartOffset = 0; + return doc; + } + + @Override + public int advance(int target) throws IOException { + if (target > nextSkipDoc) { + if (skipper == null) { + // Lazy init: first time this enum has ever been used for skipping + skipper = new ES812SkipReader(docIn.clone(), MAX_SKIP_LEVELS, true, indexHasOffsets, indexHasPayloads); + } + + if (skipped == false) { + assert skipOffset != -1; + // This is the first time this enum has skipped + // since reset() was called; load the skip data: + skipper.init(docTermStartFP + skipOffset, docTermStartFP, posTermStartFP, payTermStartFP, docFreq); + skipped = true; + } + + final int newDocUpto = 
skipper.skipTo(target) + 1; + + if (newDocUpto > blockUpto - BLOCK_SIZE + docBufferUpto) { + // Skipper moved + assert newDocUpto % BLOCK_SIZE == 0 : "got " + newDocUpto; + blockUpto = newDocUpto; + + // Force to read next block + docBufferUpto = BLOCK_SIZE; + accum = skipper.getDoc(); + docIn.seek(skipper.getDocPointer()); + posPendingFP = skipper.getPosPointer(); + payPendingFP = skipper.getPayPointer(); + posPendingCount = skipper.getPosBufferUpto(); + lastStartOffset = 0; // new document + payloadByteUpto = skipper.getPayloadByteUpto(); + } + nextSkipDoc = skipper.getNextSkipDoc(); + } + if (docBufferUpto == BLOCK_SIZE) { + refillDocs(); + } + + // Now scan: + long doc; + while (true) { + doc = docBuffer[docBufferUpto]; + freq = (int) freqBuffer[docBufferUpto]; + posPendingCount += freq; + docBufferUpto++; + + if (doc >= target) { + break; + } + } + + position = 0; + lastStartOffset = 0; + return this.doc = (int) doc; + } + + // TODO: in theory we could avoid loading frq block + // when not needed, ie, use skip data to load how far to + // seek the pos pointer ... instead of having to load frq + // blocks only to sum up how many positions to skip + private void skipPositions() throws IOException { + // Skip positions now: + int toSkip = posPendingCount - freq; + // if (DEBUG) { + // System.out.println(" FPR.skipPositions: toSkip=" + toSkip); + // } + + final int leftInBlock = BLOCK_SIZE - posBufferUpto; + if (toSkip < leftInBlock) { + int end = posBufferUpto + toSkip; + while (posBufferUpto < end) { + if (indexHasPayloads) { + payloadByteUpto += (int) payloadLengthBuffer[posBufferUpto]; + } + posBufferUpto++; + } + } else { + toSkip -= leftInBlock; + while (toSkip >= BLOCK_SIZE) { + assert posIn.getFilePointer() != lastPosBlockFP; + pforUtil.skip(posIn); + + if (indexHasPayloads) { + // Skip payloadLength block: + pforUtil.skip(payIn); + + // Skip payloadBytes block: + int numBytes = payIn.readVInt(); + payIn.seek(payIn.getFilePointer() + numBytes); + } + + if (indexHasOffsets) { + pforUtil.skip(payIn); + pforUtil.skip(payIn); + } + toSkip -= BLOCK_SIZE; + } + refillPositions(); + payloadByteUpto = 0; + posBufferUpto = 0; + while (posBufferUpto < toSkip) { + if (indexHasPayloads) { + payloadByteUpto += (int) payloadLengthBuffer[posBufferUpto]; + } + posBufferUpto++; + } + } + + position = 0; + lastStartOffset = 0; + } + + @Override + public int nextPosition() throws IOException { + assert posPendingCount > 0; + + if (posPendingFP != -1) { + posIn.seek(posPendingFP); + posPendingFP = -1; + + if (payPendingFP != -1 && payIn != null) { + payIn.seek(payPendingFP); + payPendingFP = -1; + } + + // Force buffer refill: + posBufferUpto = BLOCK_SIZE; + } + + if (posPendingCount > freq) { + skipPositions(); + posPendingCount = freq; + } + + if (posBufferUpto == BLOCK_SIZE) { + refillPositions(); + posBufferUpto = 0; + } + position += (int) posDeltaBuffer[posBufferUpto]; + + if (indexHasPayloads) { + payloadLength = (int) payloadLengthBuffer[posBufferUpto]; + payload.bytes = payloadBytes; + payload.offset = payloadByteUpto; + payload.length = payloadLength; + payloadByteUpto += payloadLength; + } + + if (indexHasOffsets) { + startOffset = lastStartOffset + (int) offsetStartDeltaBuffer[posBufferUpto]; + endOffset = startOffset + (int) offsetLengthBuffer[posBufferUpto]; + lastStartOffset = startOffset; + } + + posBufferUpto++; + posPendingCount--; + return position; + } + + @Override + public int startOffset() { + return startOffset; + } + + @Override + public int endOffset() { + return endOffset; 
+ } + + @Override + public BytesRef getPayload() { + if (payloadLength == 0) { + return null; + } else { + return payload; + } + } + + @Override + public long cost() { + return docFreq; + } + } + + final class BlockImpactsDocsEnum extends ImpactsEnum { + + final PForUtil pforUtil = new PForUtil(new ForUtil()); + + private final long[] docBuffer = new long[BLOCK_SIZE + 1]; + private final long[] freqBuffer = new long[BLOCK_SIZE]; + + private int docBufferUpto; + + private final ES812ScoreSkipReader skipper; + + final IndexInput docIn; + + final boolean indexHasFreqs; + + private int docFreq; // number of docs in this posting list + private int blockUpto; // number of documents in or before the current block + private int doc; // doc we last read + private long accum; // accumulator for doc deltas + + private int nextSkipDoc = -1; + + private long seekTo = -1; + + // as we read freqBuffer lazily, isFreqsRead shows if freqBuffer are read for the current block + // always true when we don't have freqBuffer (indexHasFreq=false) or don't need freqBuffer + // (needsFreq=false) + private boolean isFreqsRead; + + BlockImpactsDocsEnum(FieldInfo fieldInfo, IntBlockTermState termState) throws IOException { + indexHasFreqs = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; + final boolean indexHasPositions = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + final boolean indexHasOffsets = fieldInfo.getIndexOptions() + .compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + final boolean indexHasPayloads = fieldInfo.hasPayloads(); + + this.docIn = ES812PostingsReader.this.docIn.clone(); + + docFreq = termState.docFreq; + docIn.seek(termState.docStartFP); + + doc = -1; + accum = 0; + blockUpto = 0; + docBufferUpto = BLOCK_SIZE; + + skipper = new ES812ScoreSkipReader(docIn.clone(), MAX_SKIP_LEVELS, indexHasPositions, indexHasOffsets, indexHasPayloads); + skipper.init( + termState.docStartFP + termState.skipOffset, + termState.docStartFP, + termState.posStartFP, + termState.payStartFP, + docFreq + ); + + // We set the last element of docBuffer to NO_MORE_DOCS, it helps save conditionals in + // advance() + docBuffer[BLOCK_SIZE] = NO_MORE_DOCS; + this.isFreqsRead = true; + if (indexHasFreqs == false) { + Arrays.fill(freqBuffer, 1L); + } + } + + @Override + public int freq() throws IOException { + if (isFreqsRead == false) { + pforUtil.decode(docIn, freqBuffer); // read freqBuffer for this block + isFreqsRead = true; + } + return (int) freqBuffer[docBufferUpto - 1]; + } + + @Override + public int docID() { + return doc; + } + + private void refillDocs() throws IOException { + // Check if we skipped reading the previous block of freqBuffer, and if yes, position docIn + // after it + if (isFreqsRead == false) { + pforUtil.skip(docIn); + isFreqsRead = true; + } + + final int left = docFreq - blockUpto; + assert left >= 0; + + if (left >= BLOCK_SIZE) { + pforUtil.decodeAndPrefixSum(docIn, accum, docBuffer); + if (indexHasFreqs) { + isFreqsRead = false; + } + blockUpto += BLOCK_SIZE; + } else { + readVIntBlock(docIn, docBuffer, freqBuffer, left, indexHasFreqs); + prefixSum(docBuffer, left, accum); + docBuffer[left] = NO_MORE_DOCS; + blockUpto += left; + } + accum = docBuffer[BLOCK_SIZE - 1]; + docBufferUpto = 0; + assert docBuffer[BLOCK_SIZE] == NO_MORE_DOCS; + } + + @Override + public void advanceShallow(int target) throws IOException { + if (target > nextSkipDoc) { + // always plus one to fix the result, since skip position in 
Lucene90SkipReader + // is a little different from MultiLevelSkipListReader + final int newDocUpto = skipper.skipTo(target) + 1; + + if (newDocUpto >= blockUpto) { + // Skipper moved + assert newDocUpto % BLOCK_SIZE == 0 : "got " + newDocUpto; + blockUpto = newDocUpto; + + // Force to read next block + docBufferUpto = BLOCK_SIZE; + accum = skipper.getDoc(); + seekTo = skipper.getDocPointer(); // delay the seek + } + // next time we call advance, this is used to + // foresee whether skipper is necessary. + nextSkipDoc = skipper.getNextSkipDoc(); + } + assert nextSkipDoc >= target; + } + + @Override + public Impacts getImpacts() throws IOException { + // nextDoc() doesn't advance skip lists, so it's important to do it here to make sure we're + // not returning impacts over a bigger range of doc IDs than necessary. + advanceShallow(doc); + return skipper.getImpacts(); + } + + @Override + public int nextDoc() throws IOException { + if (docBufferUpto == BLOCK_SIZE) { + if (seekTo >= 0) { + docIn.seek(seekTo); + isFreqsRead = true; // reset isFreqsRead + seekTo = -1; + } + refillDocs(); + } + return this.doc = (int) docBuffer[docBufferUpto++]; + } + + @Override + public int advance(int target) throws IOException { + if (target > nextSkipDoc) { + advanceShallow(target); + } + if (docBufferUpto == BLOCK_SIZE) { + if (seekTo >= 0) { + docIn.seek(seekTo); + isFreqsRead = true; // reset isFreqsRead + seekTo = -1; + } + refillDocs(); + } + + int next = findFirstGreater(docBuffer, target, docBufferUpto); + this.doc = (int) docBuffer[next]; + docBufferUpto = next + 1; + return doc; + } + + @Override + public int nextPosition() throws IOException { + return -1; + } + + @Override + public int startOffset() { + return -1; + } + + @Override + public int endOffset() { + return -1; + } + + @Override + public BytesRef getPayload() { + return null; + } + + @Override + public long cost() { + return docFreq; + } + } + + final class BlockImpactsPostingsEnum extends ImpactsEnum { + + final PForUtil pforUtil = new PForUtil(new ForUtil()); + + private final long[] docBuffer = new long[BLOCK_SIZE]; + private final long[] freqBuffer = new long[BLOCK_SIZE]; + private final long[] posDeltaBuffer = new long[BLOCK_SIZE]; + + private int docBufferUpto; + private int posBufferUpto; + + private final ES812ScoreSkipReader skipper; + + final IndexInput docIn; + final IndexInput posIn; + + final boolean indexHasOffsets; + final boolean indexHasPayloads; + + private int docFreq; // number of docs in this posting list + private long totalTermFreq; // number of positions in this posting list + private int docUpto; // how many docs we've read + private int doc; // doc we last read + private long accum; // accumulator for doc deltas + private int freq; // freq we last read + private int position; // current position + + // how many positions "behind" we are; nextPosition must + // skip these to "catch up": + private int posPendingCount; + + // Lazy pos seek: if != -1 then we must seek to this FP + // before reading positions: + private long posPendingFP; + + // Where this term's postings start in the .doc file: + private long docTermStartFP; + + // Where this term's postings start in the .pos file: + private long posTermStartFP; + + // Where this term's payloads/offsets start in the .pay + // file: + private long payTermStartFP; + + // File pointer where the last (vInt encoded) pos delta + // block is. 
We need this to know whether to bulk + // decode vs vInt decode the block: + private long lastPosBlockFP; + + private int nextSkipDoc = -1; + + private long seekTo = -1; + + BlockImpactsPostingsEnum(FieldInfo fieldInfo, IntBlockTermState termState) throws IOException { + indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + indexHasPayloads = fieldInfo.hasPayloads(); + + this.docIn = ES812PostingsReader.this.docIn.clone(); + + this.posIn = ES812PostingsReader.this.posIn.clone(); + + docFreq = termState.docFreq; + docTermStartFP = termState.docStartFP; + posTermStartFP = termState.posStartFP; + payTermStartFP = termState.payStartFP; + totalTermFreq = termState.totalTermFreq; + docIn.seek(docTermStartFP); + posPendingFP = posTermStartFP; + posPendingCount = 0; + if (termState.totalTermFreq < BLOCK_SIZE) { + lastPosBlockFP = posTermStartFP; + } else if (termState.totalTermFreq == BLOCK_SIZE) { + lastPosBlockFP = -1; + } else { + lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset; + } + + doc = -1; + accum = 0; + docUpto = 0; + docBufferUpto = BLOCK_SIZE; + + skipper = new ES812ScoreSkipReader(docIn.clone(), MAX_SKIP_LEVELS, true, indexHasOffsets, indexHasPayloads); + skipper.init(docTermStartFP + termState.skipOffset, docTermStartFP, posTermStartFP, payTermStartFP, docFreq); + } + + @Override + public int freq() throws IOException { + return freq; + } + + @Override + public int docID() { + return doc; + } + + private void refillDocs() throws IOException { + final int left = docFreq - docUpto; + assert left >= 0; + + if (left >= BLOCK_SIZE) { + pforUtil.decodeAndPrefixSum(docIn, accum, docBuffer); + pforUtil.decode(docIn, freqBuffer); + } else { + readVIntBlock(docIn, docBuffer, freqBuffer, left, true); + prefixSum(docBuffer, left, accum); + docBuffer[left] = NO_MORE_DOCS; + } + accum = docBuffer[BLOCK_SIZE - 1]; + docBufferUpto = 0; + } + + private void refillPositions() throws IOException { + if (posIn.getFilePointer() == lastPosBlockFP) { + final int count = (int) (totalTermFreq % BLOCK_SIZE); + int payloadLength = 0; + for (int i = 0; i < count; i++) { + int code = posIn.readVInt(); + if (indexHasPayloads) { + if ((code & 1) != 0) { + payloadLength = posIn.readVInt(); + } + posDeltaBuffer[i] = code >>> 1; + if (payloadLength != 0) { + posIn.seek(posIn.getFilePointer() + payloadLength); + } + } else { + posDeltaBuffer[i] = code; + } + if (indexHasOffsets) { + if ((posIn.readVInt() & 1) != 0) { + // offset length changed + posIn.readVInt(); + } + } + } + } else { + pforUtil.decode(posIn, posDeltaBuffer); + } + } + + @Override + public void advanceShallow(int target) throws IOException { + if (target > nextSkipDoc) { + // always plus one to fix the result, since skip position in Lucene90SkipReader + // is a little different from MultiLevelSkipListReader + final int newDocUpto = skipper.skipTo(target) + 1; + + if (newDocUpto > docUpto) { + // Skipper moved + assert newDocUpto % BLOCK_SIZE == 0 : "got " + newDocUpto; + docUpto = newDocUpto; + + // Force to read next block + docBufferUpto = BLOCK_SIZE; + accum = skipper.getDoc(); + posPendingFP = skipper.getPosPointer(); + posPendingCount = skipper.getPosBufferUpto(); + seekTo = skipper.getDocPointer(); // delay the seek + } + // next time we call advance, this is used to + // foresee whether skipper is necessary. 
+ nextSkipDoc = skipper.getNextSkipDoc(); + } + assert nextSkipDoc >= target; + } + + @Override + public Impacts getImpacts() throws IOException { + advanceShallow(doc); + return skipper.getImpacts(); + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target > nextSkipDoc) { + advanceShallow(target); + } + if (docBufferUpto == BLOCK_SIZE) { + if (seekTo >= 0) { + docIn.seek(seekTo); + seekTo = -1; + } + refillDocs(); + } + + int next = findFirstGreater(docBuffer, target, docBufferUpto); + if (next == BLOCK_SIZE) { + return doc = NO_MORE_DOCS; + } + this.doc = (int) docBuffer[next]; + this.freq = (int) freqBuffer[next]; + for (int i = docBufferUpto; i <= next; ++i) { + posPendingCount += (int) freqBuffer[i]; + } + docUpto += next - docBufferUpto + 1; + docBufferUpto = next + 1; + position = 0; + return doc; + } + + // TODO: in theory we could avoid loading frq block + // when not needed, ie, use skip data to load how far to + // seek the pos pointer ... instead of having to load frq + // blocks only to sum up how many positions to skip + private void skipPositions() throws IOException { + // Skip positions now: + int toSkip = posPendingCount - freq; + + final int leftInBlock = BLOCK_SIZE - posBufferUpto; + if (toSkip < leftInBlock) { + posBufferUpto += toSkip; + } else { + toSkip -= leftInBlock; + while (toSkip >= BLOCK_SIZE) { + assert posIn.getFilePointer() != lastPosBlockFP; + pforUtil.skip(posIn); + toSkip -= BLOCK_SIZE; + } + refillPositions(); + posBufferUpto = toSkip; + } + + position = 0; + } + + @Override + public int nextPosition() throws IOException { + assert posPendingCount > 0; + + if (posPendingFP != -1) { + posIn.seek(posPendingFP); + posPendingFP = -1; + + // Force buffer refill: + posBufferUpto = BLOCK_SIZE; + } + + if (posPendingCount > freq) { + skipPositions(); + posPendingCount = freq; + } + + if (posBufferUpto == BLOCK_SIZE) { + refillPositions(); + posBufferUpto = 0; + } + position += (int) posDeltaBuffer[posBufferUpto++]; + + posPendingCount--; + return position; + } + + @Override + public int startOffset() { + return -1; + } + + @Override + public int endOffset() { + return -1; + } + + @Override + public BytesRef getPayload() { + return null; + } + + @Override + public long cost() { + return docFreq; + } + } + + final class BlockImpactsEverythingEnum extends ImpactsEnum { + + final PForUtil pforUtil = new PForUtil(new ForUtil()); + + private final long[] docBuffer = new long[BLOCK_SIZE]; + private final long[] freqBuffer = new long[BLOCK_SIZE]; + private final long[] posDeltaBuffer = new long[BLOCK_SIZE]; + + private final long[] payloadLengthBuffer; + private final long[] offsetStartDeltaBuffer; + private final long[] offsetLengthBuffer; + + private byte[] payloadBytes; + private int payloadByteUpto; + private int payloadLength; + + private int lastStartOffset; + private int startOffset = -1; + private int endOffset = -1; + + private int docBufferUpto; + private int posBufferUpto; + + private final ES812ScoreSkipReader skipper; + + final IndexInput docIn; + final IndexInput posIn; + final IndexInput payIn; + final BytesRef payload; + + final boolean indexHasFreq; + final boolean indexHasPos; + final boolean indexHasOffsets; + final boolean indexHasPayloads; + + private int docFreq; // number of docs in this posting list + private long totalTermFreq; // number of positions in this posting list + private int docUpto; // how many docs we've read + 
private int posDocUpTo; // for how many docs we've read positions, offsets, and payloads + private int doc; // doc we last read + private long accum; // accumulator for doc deltas + private int position; // current position + + // how many positions "behind" we are; nextPosition must + // skip these to "catch up": + private int posPendingCount; + + // Lazy pos seek: if != -1 then we must seek to this FP + // before reading positions: + private long posPendingFP; + + // Lazy pay seek: if != -1 then we must seek to this FP + // before reading payloads/offsets: + private long payPendingFP; + + // Where this term's postings start in the .doc file: + private long docTermStartFP; + + // Where this term's postings start in the .pos file: + private long posTermStartFP; + + // Where this term's payloads/offsets start in the .pay + // file: + private long payTermStartFP; + + // File pointer where the last (vInt encoded) pos delta + // block is. We need this to know whether to bulk + // decode vs vInt decode the block: + private long lastPosBlockFP; + + private int nextSkipDoc = -1; + + private final boolean needsPositions; + private final boolean needsOffsets; // true if we actually need offsets + private final boolean needsPayloads; // true if we actually need payloads + + private boolean isFreqsRead; // shows if freqBuffer for the current doc block are read into freqBuffer + + private long seekTo = -1; + + BlockImpactsEverythingEnum(FieldInfo fieldInfo, IntBlockTermState termState, int flags) throws IOException { + indexHasFreq = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS) >= 0; + indexHasPos = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS) >= 0; + indexHasOffsets = fieldInfo.getIndexOptions().compareTo(IndexOptions.DOCS_AND_FREQS_AND_POSITIONS_AND_OFFSETS) >= 0; + indexHasPayloads = fieldInfo.hasPayloads(); + + needsPositions = PostingsEnum.featureRequested(flags, PostingsEnum.POSITIONS); + needsOffsets = PostingsEnum.featureRequested(flags, PostingsEnum.OFFSETS); + needsPayloads = PostingsEnum.featureRequested(flags, PostingsEnum.PAYLOADS); + + this.docIn = ES812PostingsReader.this.docIn.clone(); + + if (indexHasPos && needsPositions) { + this.posIn = ES812PostingsReader.this.posIn.clone(); + } else { + this.posIn = null; + } + + if ((indexHasOffsets && needsOffsets) || (indexHasPayloads && needsPayloads)) { + this.payIn = ES812PostingsReader.this.payIn.clone(); + } else { + this.payIn = null; + } + + if (indexHasOffsets) { + offsetStartDeltaBuffer = new long[BLOCK_SIZE]; + offsetLengthBuffer = new long[BLOCK_SIZE]; + } else { + offsetStartDeltaBuffer = null; + offsetLengthBuffer = null; + startOffset = -1; + endOffset = -1; + } + + if (indexHasPayloads) { + payloadLengthBuffer = new long[BLOCK_SIZE]; + payloadBytes = new byte[128]; + payload = new BytesRef(); + } else { + payloadLengthBuffer = null; + payloadBytes = null; + payload = null; + } + + docFreq = termState.docFreq; + docTermStartFP = termState.docStartFP; + posTermStartFP = termState.posStartFP; + payTermStartFP = termState.payStartFP; + totalTermFreq = termState.totalTermFreq; + docIn.seek(docTermStartFP); + posPendingFP = posTermStartFP; + payPendingFP = payTermStartFP; + posPendingCount = 0; + if (termState.totalTermFreq < BLOCK_SIZE) { + lastPosBlockFP = posTermStartFP; + } else if (termState.totalTermFreq == BLOCK_SIZE) { + lastPosBlockFP = -1; + } else { + lastPosBlockFP = posTermStartFP + termState.lastPosBlockOffset; + } + + doc = -1; + accum = 0; + docUpto = 0; + 
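// Added note (not in the original source): posDocUpTo deliberately trails docUpto until + // positions are actually requested, and isFreqsRead starts out true because no freq block has + // been deferred yet; together they implement the lazy freq-block decode used by refillDocs(). +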
posDocUpTo = 0; + isFreqsRead = true; + docBufferUpto = BLOCK_SIZE; + + skipper = new ES812ScoreSkipReader(docIn.clone(), MAX_SKIP_LEVELS, indexHasPos, indexHasOffsets, indexHasPayloads); + skipper.init(docTermStartFP + termState.skipOffset, docTermStartFP, posTermStartFP, payTermStartFP, docFreq); + + if (indexHasFreq == false) { + for (int i = 0; i < ForUtil.BLOCK_SIZE; ++i) { + freqBuffer[i] = 1; + } + } + } + + @Override + public int freq() throws IOException { + if (indexHasFreq && (isFreqsRead == false)) { + pforUtil.decode(docIn, freqBuffer); // read freqBuffer for this block + isFreqsRead = true; + } + return (int) freqBuffer[docBufferUpto - 1]; + } + + @Override + public int docID() { + return doc; + } + + private void refillDocs() throws IOException { + if (indexHasFreq) { + if (isFreqsRead == false) { // previous freq block was not read + // check if we need to load the previous freq block to catch up on positions or we can + // skip it + if (indexHasPos && needsPositions && (posDocUpTo < docUpto)) { + pforUtil.decode(docIn, freqBuffer); // load the previous freq block + } else { + pforUtil.skip(docIn); // skip it + } + isFreqsRead = true; + } + if (indexHasPos && needsPositions) { + while (posDocUpTo < docUpto) { // catch on positions, bring posPendingCount upto the current doc + posPendingCount += (int) freqBuffer[docBufferUpto - (docUpto - posDocUpTo)]; + posDocUpTo++; + } + } + } + + final int left = docFreq - docUpto; + assert left >= 0; + + if (left >= BLOCK_SIZE) { + pforUtil.decodeAndPrefixSum(docIn, accum, docBuffer); + if (indexHasFreq) { + isFreqsRead = false; // freq block will be loaded lazily when necessary, we don't load it here + } + } else { + readVIntBlock(docIn, docBuffer, freqBuffer, left, indexHasFreq); + prefixSum(docBuffer, left, accum); + docBuffer[left] = NO_MORE_DOCS; + } + accum = docBuffer[BLOCK_SIZE - 1]; + docBufferUpto = 0; + } + + private void refillPositions() throws IOException { + if (posIn.getFilePointer() == lastPosBlockFP) { + final int count = (int) (totalTermFreq % BLOCK_SIZE); + int payloadLength = 0; + int offsetLength = 0; + payloadByteUpto = 0; + for (int i = 0; i < count; i++) { + int code = posIn.readVInt(); + if (indexHasPayloads) { + if ((code & 1) != 0) { + payloadLength = posIn.readVInt(); + } + payloadLengthBuffer[i] = payloadLength; + posDeltaBuffer[i] = code >>> 1; + if (payloadLength != 0) { + if (payloadByteUpto + payloadLength > payloadBytes.length) { + payloadBytes = ArrayUtil.grow(payloadBytes, payloadByteUpto + payloadLength); + } + posIn.readBytes(payloadBytes, payloadByteUpto, payloadLength); + payloadByteUpto += payloadLength; + } + } else { + posDeltaBuffer[i] = code; + } + + if (indexHasOffsets) { + int deltaCode = posIn.readVInt(); + if ((deltaCode & 1) != 0) { + offsetLength = posIn.readVInt(); + } + offsetStartDeltaBuffer[i] = deltaCode >>> 1; + offsetLengthBuffer[i] = offsetLength; + } + } + payloadByteUpto = 0; + } else { + pforUtil.decode(posIn, posDeltaBuffer); + + if (indexHasPayloads && payIn != null) { + if (needsPayloads) { + pforUtil.decode(payIn, payloadLengthBuffer); + int numBytes = payIn.readVInt(); + + if (numBytes > payloadBytes.length) { + payloadBytes = ArrayUtil.growNoCopy(payloadBytes, numBytes); + } + payIn.readBytes(payloadBytes, 0, numBytes); + } else { + // this works, because when writing a vint block we always force the first length to be + // written + pforUtil.skip(payIn); // skip over lengths + int numBytes = payIn.readVInt(); // read length of payloadBytes + 
payIn.seek(payIn.getFilePointer() + numBytes); // skip over payloadBytes + } + payloadByteUpto = 0; + } + + if (indexHasOffsets && payIn != null) { + if (needsOffsets) { + pforUtil.decode(payIn, offsetStartDeltaBuffer); + pforUtil.decode(payIn, offsetLengthBuffer); + } else { + // this works, because when writing a vint block we always force the first length to be + // written + pforUtil.skip(payIn); // skip over starts + pforUtil.skip(payIn); // skip over lengths + } + } + } + } + + @Override + public void advanceShallow(int target) throws IOException { + if (target > nextSkipDoc) { + // always plus one to fix the result, since skip position in Lucene90SkipReader + // is a little different from MultiLevelSkipListReader + final int newDocUpto = skipper.skipTo(target) + 1; + + if (newDocUpto > docUpto) { + // Skipper moved + assert newDocUpto % BLOCK_SIZE == 0 : "got " + newDocUpto; + docUpto = newDocUpto; + posDocUpTo = docUpto; + + // Force to read next block + docBufferUpto = BLOCK_SIZE; + accum = skipper.getDoc(); + posPendingFP = skipper.getPosPointer(); + payPendingFP = skipper.getPayPointer(); + posPendingCount = skipper.getPosBufferUpto(); + lastStartOffset = 0; // new document + payloadByteUpto = skipper.getPayloadByteUpto(); // actually, this is just lastSkipEntry + seekTo = skipper.getDocPointer(); // delay the seek + } + // next time we call advance, this is used to + // foresee whether skipper is necessary. + nextSkipDoc = skipper.getNextSkipDoc(); + } + assert nextSkipDoc >= target; + } + + @Override + public Impacts getImpacts() throws IOException { + advanceShallow(doc); + return skipper.getImpacts(); + } + + @Override + public int nextDoc() throws IOException { + return advance(doc + 1); + } + + @Override + public int advance(int target) throws IOException { + if (target > nextSkipDoc) { + advanceShallow(target); + } + if (docBufferUpto == BLOCK_SIZE) { + if (seekTo >= 0) { + docIn.seek(seekTo); + seekTo = -1; + isFreqsRead = true; // reset isFreqsRead + } + refillDocs(); + } + + // Now scan: + long doc; + while (true) { + doc = docBuffer[docBufferUpto]; + docBufferUpto++; + docUpto++; + + if (doc >= target) { + break; + } + + if (docBufferUpto == BLOCK_SIZE) { + return this.doc = NO_MORE_DOCS; + } + } + position = 0; + lastStartOffset = 0; + + return this.doc = (int) doc; + } + + // TODO: in theory we could avoid loading frq block + // when not needed, ie, use skip data to load how far to + // seek the pos pointer ... 
instead of having to load frq + // blocks only to sum up how many positions to skip + private void skipPositions() throws IOException { + // Skip positions now: + int toSkip = posPendingCount - (int) freqBuffer[docBufferUpto - 1]; + // if (DEBUG) { + // System.out.println(" FPR.skipPositions: toSkip=" + toSkip); + // } + + final int leftInBlock = BLOCK_SIZE - posBufferUpto; + if (toSkip < leftInBlock) { + int end = posBufferUpto + toSkip; + while (posBufferUpto < end) { + if (indexHasPayloads) { + payloadByteUpto += (int) payloadLengthBuffer[posBufferUpto]; + } + posBufferUpto++; + } + } else { + toSkip -= leftInBlock; + while (toSkip >= BLOCK_SIZE) { + assert posIn.getFilePointer() != lastPosBlockFP; + pforUtil.skip(posIn); + + if (indexHasPayloads && payIn != null) { + // Skip payloadLength block: + pforUtil.skip(payIn); + + // Skip payloadBytes block: + int numBytes = payIn.readVInt(); + payIn.seek(payIn.getFilePointer() + numBytes); + } + + if (indexHasOffsets && payIn != null) { + pforUtil.skip(payIn); + pforUtil.skip(payIn); + } + toSkip -= BLOCK_SIZE; + } + refillPositions(); + payloadByteUpto = 0; + posBufferUpto = 0; + while (posBufferUpto < toSkip) { + if (indexHasPayloads) { + payloadByteUpto += (int) payloadLengthBuffer[posBufferUpto]; + } + posBufferUpto++; + } + } + + position = 0; + lastStartOffset = 0; + } + + @Override + public int nextPosition() throws IOException { + if (indexHasPos == false || needsPositions == false) { + return -1; + } + + if (isFreqsRead == false) { + pforUtil.decode(docIn, freqBuffer); // read freqBuffer for this docs block + isFreqsRead = true; + } + while (posDocUpTo < docUpto) { // bring posPendingCount upto the current doc + posPendingCount += (int) freqBuffer[docBufferUpto - (docUpto - posDocUpTo)]; + posDocUpTo++; + } + + assert posPendingCount > 0; + + if (posPendingFP != -1) { + posIn.seek(posPendingFP); + posPendingFP = -1; + + if (payPendingFP != -1 && payIn != null) { + payIn.seek(payPendingFP); + payPendingFP = -1; + } + + // Force buffer refill: + posBufferUpto = BLOCK_SIZE; + } + + if (posPendingCount > freqBuffer[docBufferUpto - 1]) { + skipPositions(); + posPendingCount = (int) freqBuffer[docBufferUpto - 1]; + } + + if (posBufferUpto == BLOCK_SIZE) { + refillPositions(); + posBufferUpto = 0; + } + position += (int) posDeltaBuffer[posBufferUpto]; + + if (indexHasPayloads) { + payloadLength = (int) payloadLengthBuffer[posBufferUpto]; + payload.bytes = payloadBytes; + payload.offset = payloadByteUpto; + payload.length = payloadLength; + payloadByteUpto += payloadLength; + } + + if (indexHasOffsets && needsOffsets) { + startOffset = lastStartOffset + (int) offsetStartDeltaBuffer[posBufferUpto]; + endOffset = startOffset + (int) offsetLengthBuffer[posBufferUpto]; + lastStartOffset = startOffset; + } + + posBufferUpto++; + posPendingCount--; + return position; + } + + @Override + public int startOffset() { + return startOffset; + } + + @Override + public int endOffset() { + return endOffset; + } + + @Override + public BytesRef getPayload() { + if (payloadLength == 0) { + return null; + } else { + return payload; + } + } + + @Override + public long cost() { + return docFreq; + } + } + + @Override + public void checkIntegrity() throws IOException { + if (docIn != null) { + CodecUtil.checksumEntireFile(docIn); + } + if (posIn != null) { + CodecUtil.checksumEntireFile(posIn); + } + if (payIn != null) { + CodecUtil.checksumEntireFile(payIn); + } + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(positions=" + 
(posIn != null) + ",payloads=" + (payIn != null) + ")"; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java new file mode 100644 index 0000000000000..9ab7ed42efb09 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812PostingsWriter.java @@ -0,0 +1,523 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.BlockTermState; +import org.apache.lucene.codecs.CodecUtil; +import org.apache.lucene.codecs.CompetitiveImpactAccumulator; +import org.apache.lucene.codecs.PushPostingsWriterBase; +import org.apache.lucene.index.CorruptIndexException; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.IndexFileNames; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.NumericDocValues; +import org.apache.lucene.index.SegmentWriteState; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.BitUtil; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat.IntBlockTermState; + +import java.io.IOException; + +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.BLOCK_SIZE; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.DOC_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.MAX_SKIP_LEVELS; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.PAY_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.POS_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.TERMS_CODEC; +import static org.elasticsearch.index.codec.postings.ES812PostingsFormat.VERSION_CURRENT; + +/** + * Concrete class that writes docId(maybe frq,pos,offset,payloads) list with postings format. + * + *
<p>
Postings list for each term will be stored separately. + * + * @see ES812SkipWriter for details about skipping setting and postings layout. + */ +final class ES812PostingsWriter extends PushPostingsWriterBase { + + IndexOutput docOut; + IndexOutput posOut; + IndexOutput payOut; + + static final IntBlockTermState emptyState = new IntBlockTermState(); + IntBlockTermState lastState; + + // Holds starting file pointers for current term: + private long docStartFP; + private long posStartFP; + private long payStartFP; + + final long[] docDeltaBuffer; + final long[] freqBuffer; + private int docBufferUpto; + + final long[] posDeltaBuffer; + final long[] payloadLengthBuffer; + final long[] offsetStartDeltaBuffer; + final long[] offsetLengthBuffer; + private int posBufferUpto; + + private byte[] payloadBytes; + private int payloadByteUpto; + + private int lastBlockDocID; + private long lastBlockPosFP; + private long lastBlockPayFP; + private int lastBlockPosBufferUpto; + private int lastBlockPayloadByteUpto; + + private int lastDocID; + private int lastPosition; + private int lastStartOffset; + private int docCount; + + private final PForUtil pforUtil; + private final ES812SkipWriter skipWriter; + + private boolean fieldHasNorms; + private NumericDocValues norms; + private final CompetitiveImpactAccumulator competitiveFreqNormAccumulator = new CompetitiveImpactAccumulator(); + + /** Creates a postings writer */ + ES812PostingsWriter(SegmentWriteState state) throws IOException { + + String docFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, ES812PostingsFormat.DOC_EXTENSION); + docOut = state.directory.createOutput(docFileName, state.context); + IndexOutput posOut = null; + IndexOutput payOut = null; + boolean success = false; + try { + CodecUtil.writeIndexHeader(docOut, DOC_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); + pforUtil = new PForUtil(new ForUtil()); + if (state.fieldInfos.hasProx()) { + posDeltaBuffer = new long[BLOCK_SIZE]; + String posFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES812PostingsFormat.POS_EXTENSION + ); + posOut = state.directory.createOutput(posFileName, state.context); + CodecUtil.writeIndexHeader(posOut, POS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); + + if (state.fieldInfos.hasPayloads()) { + payloadBytes = new byte[128]; + payloadLengthBuffer = new long[BLOCK_SIZE]; + } else { + payloadBytes = null; + payloadLengthBuffer = null; + } + + if (state.fieldInfos.hasOffsets()) { + offsetStartDeltaBuffer = new long[BLOCK_SIZE]; + offsetLengthBuffer = new long[BLOCK_SIZE]; + } else { + offsetStartDeltaBuffer = null; + offsetLengthBuffer = null; + } + + if (state.fieldInfos.hasPayloads() || state.fieldInfos.hasOffsets()) { + String payFileName = IndexFileNames.segmentFileName( + state.segmentInfo.name, + state.segmentSuffix, + ES812PostingsFormat.PAY_EXTENSION + ); + payOut = state.directory.createOutput(payFileName, state.context); + CodecUtil.writeIndexHeader(payOut, PAY_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); + } + } else { + posDeltaBuffer = null; + payloadLengthBuffer = null; + offsetStartDeltaBuffer = null; + offsetLengthBuffer = null; + payloadBytes = null; + } + this.payOut = payOut; + this.posOut = posOut; + success = true; + } finally { + if (success == false) { + IOUtils.closeWhileHandlingException(docOut, posOut, payOut); + } + } + + docDeltaBuffer = new long[BLOCK_SIZE]; + freqBuffer = new 
long[BLOCK_SIZE]; + + // TODO: should we try skipping every 2/4 blocks...? + skipWriter = new ES812SkipWriter(MAX_SKIP_LEVELS, BLOCK_SIZE, state.segmentInfo.maxDoc(), docOut, posOut, payOut); + } + + @Override + public IntBlockTermState newTermState() { + return new IntBlockTermState(); + } + + @Override + public void init(IndexOutput termsOut, SegmentWriteState state) throws IOException { + CodecUtil.writeIndexHeader(termsOut, TERMS_CODEC, VERSION_CURRENT, state.segmentInfo.getId(), state.segmentSuffix); + termsOut.writeVInt(BLOCK_SIZE); + } + + @Override + public void setField(FieldInfo fieldInfo) { + super.setField(fieldInfo); + skipWriter.setField(writePositions, writeOffsets, writePayloads); + lastState = emptyState; + fieldHasNorms = fieldInfo.hasNorms(); + } + + @Override + public void startTerm(NumericDocValues norms) { + docStartFP = docOut.getFilePointer(); + if (writePositions) { + posStartFP = posOut.getFilePointer(); + if (writePayloads || writeOffsets) { + payStartFP = payOut.getFilePointer(); + } + } + lastDocID = 0; + lastBlockDocID = -1; + skipWriter.resetSkip(); + this.norms = norms; + competitiveFreqNormAccumulator.clear(); + } + + @Override + public void startDoc(int docID, int termDocFreq) throws IOException { + // Have collected a block of docs, and get a new doc. + // Should write skip data as well as postings list for + // current block. + if (lastBlockDocID != -1 && docBufferUpto == 0) { + skipWriter.bufferSkip( + lastBlockDocID, + competitiveFreqNormAccumulator, + docCount, + lastBlockPosFP, + lastBlockPayFP, + lastBlockPosBufferUpto, + lastBlockPayloadByteUpto + ); + competitiveFreqNormAccumulator.clear(); + } + + final int docDelta = docID - lastDocID; + + if (docID < 0 || (docCount > 0 && docDelta <= 0)) { + throw new CorruptIndexException("docs out of order (" + docID + " <= " + lastDocID + " )", docOut); + } + + docDeltaBuffer[docBufferUpto] = docDelta; + if (writeFreqs) { + freqBuffer[docBufferUpto] = termDocFreq; + } + + docBufferUpto++; + docCount++; + + if (docBufferUpto == BLOCK_SIZE) { + pforUtil.encode(docDeltaBuffer, docOut); + if (writeFreqs) { + pforUtil.encode(freqBuffer, docOut); + } + // NOTE: don't set docBufferUpto back to 0 here; + // finishDoc will do so (because it needs to see that + // the block was filled so it can save skip data) + } + + lastDocID = docID; + lastPosition = 0; + lastStartOffset = 0; + + long norm; + if (fieldHasNorms) { + boolean found = norms.advanceExact(docID); + if (found == false) { + // This can happen if indexing hits a problem after adding a doc to the + // postings but before buffering the norm. Such documents are written + // deleted and will go away on the first merge. + norm = 1L; + } else { + norm = norms.longValue(); + assert norm != 0 : docID; + } + } else { + norm = 1L; + } + + competitiveFreqNormAccumulator.add(writeFreqs ? 
termDocFreq : 1, norm); + } + + @Override + public void addPosition(int position, BytesRef payload, int startOffset, int endOffset) throws IOException { + if (position > IndexWriter.MAX_POSITION) { + throw new CorruptIndexException( + "position=" + position + " is too large (> IndexWriter.MAX_POSITION=" + IndexWriter.MAX_POSITION + ")", + docOut + ); + } + if (position < 0) { + throw new CorruptIndexException("position=" + position + " is < 0", docOut); + } + posDeltaBuffer[posBufferUpto] = position - lastPosition; + if (writePayloads) { + if (payload == null || payload.length == 0) { + // no payload + payloadLengthBuffer[posBufferUpto] = 0; + } else { + payloadLengthBuffer[posBufferUpto] = payload.length; + if (payloadByteUpto + payload.length > payloadBytes.length) { + payloadBytes = ArrayUtil.grow(payloadBytes, payloadByteUpto + payload.length); + } + System.arraycopy(payload.bytes, payload.offset, payloadBytes, payloadByteUpto, payload.length); + payloadByteUpto += payload.length; + } + } + + if (writeOffsets) { + assert startOffset >= lastStartOffset; + assert endOffset >= startOffset; + offsetStartDeltaBuffer[posBufferUpto] = startOffset - lastStartOffset; + offsetLengthBuffer[posBufferUpto] = endOffset - startOffset; + lastStartOffset = startOffset; + } + + posBufferUpto++; + lastPosition = position; + if (posBufferUpto == BLOCK_SIZE) { + pforUtil.encode(posDeltaBuffer, posOut); + + if (writePayloads) { + pforUtil.encode(payloadLengthBuffer, payOut); + payOut.writeVInt(payloadByteUpto); + payOut.writeBytes(payloadBytes, 0, payloadByteUpto); + payloadByteUpto = 0; + } + if (writeOffsets) { + pforUtil.encode(offsetStartDeltaBuffer, payOut); + pforUtil.encode(offsetLengthBuffer, payOut); + } + posBufferUpto = 0; + } + } + + @Override + public void finishDoc() throws IOException { + // Since we don't know df for current term, we had to buffer + // those skip data for each block, and when a new doc comes, + // write them to skip file. + if (docBufferUpto == BLOCK_SIZE) { + lastBlockDocID = lastDocID; + if (posOut != null) { + if (payOut != null) { + lastBlockPayFP = payOut.getFilePointer(); + } + lastBlockPosFP = posOut.getFilePointer(); + lastBlockPosBufferUpto = posBufferUpto; + lastBlockPayloadByteUpto = payloadByteUpto; + } + docBufferUpto = 0; + } + } + + /** Called when we are done adding docs to this term */ + @Override + public void finishTerm(BlockTermState _state) throws IOException { + IntBlockTermState state = (IntBlockTermState) _state; + assert state.docFreq > 0; + + // TODO: wasteful we are counting this (counting # docs + // for this term) in two places? + assert state.docFreq == docCount : state.docFreq + " vs " + docCount; + + // docFreq == 1, don't write the single docid/freq to a separate file along with a pointer to + // it. 
+ final int singletonDocID; + if (state.docFreq == 1) { + // pulse the singleton docid into the term dictionary, freq is implicitly totalTermFreq + singletonDocID = (int) docDeltaBuffer[0]; + } else { + singletonDocID = -1; + // vInt encode the remaining doc deltas and freqs: + for (int i = 0; i < docBufferUpto; i++) { + final int docDelta = (int) docDeltaBuffer[i]; + final int freq = (int) freqBuffer[i]; + if (writeFreqs == false) { + docOut.writeVInt(docDelta); + } else if (freq == 1) { + docOut.writeVInt((docDelta << 1) | 1); + } else { + docOut.writeVInt(docDelta << 1); + docOut.writeVInt(freq); + } + } + } + + final long lastPosBlockOffset; + + if (writePositions) { + // totalTermFreq is just total number of positions(or payloads, or offsets) + // associated with current term. + assert state.totalTermFreq != -1; + if (state.totalTermFreq > BLOCK_SIZE) { + // record file offset for last pos in last block + lastPosBlockOffset = posOut.getFilePointer() - posStartFP; + } else { + lastPosBlockOffset = -1; + } + if (posBufferUpto > 0) { + // TODO: should we send offsets/payloads to + // .pay...? seems wasteful (have to store extra + // vLong for low (< BLOCK_SIZE) DF terms = vast vast + // majority) + + // vInt encode the remaining positions/payloads/offsets: + int lastPayloadLength = -1; // force first payload length to be written + int lastOffsetLength = -1; // force first offset length to be written + int payloadBytesReadUpto = 0; + for (int i = 0; i < posBufferUpto; i++) { + final int posDelta = (int) posDeltaBuffer[i]; + if (writePayloads) { + final int payloadLength = (int) payloadLengthBuffer[i]; + if (payloadLength != lastPayloadLength) { + lastPayloadLength = payloadLength; + posOut.writeVInt((posDelta << 1) | 1); + posOut.writeVInt(payloadLength); + } else { + posOut.writeVInt(posDelta << 1); + } + + if (payloadLength != 0) { + posOut.writeBytes(payloadBytes, payloadBytesReadUpto, payloadLength); + payloadBytesReadUpto += payloadLength; + } + } else { + posOut.writeVInt(posDelta); + } + + if (writeOffsets) { + int delta = (int) offsetStartDeltaBuffer[i]; + int length = (int) offsetLengthBuffer[i]; + if (length == lastOffsetLength) { + posOut.writeVInt(delta << 1); + } else { + posOut.writeVInt(delta << 1 | 1); + posOut.writeVInt(length); + lastOffsetLength = length; + } + } + } + + if (writePayloads) { + assert payloadBytesReadUpto == payloadByteUpto; + payloadByteUpto = 0; + } + } + } else { + lastPosBlockOffset = -1; + } + + long skipOffset; + if (docCount > BLOCK_SIZE) { + skipOffset = skipWriter.writeSkip(docOut) - docStartFP; + } else { + skipOffset = -1; + } + + state.docStartFP = docStartFP; + state.posStartFP = posStartFP; + state.payStartFP = payStartFP; + state.singletonDocID = singletonDocID; + state.skipOffset = skipOffset; + state.lastPosBlockOffset = lastPosBlockOffset; + docBufferUpto = 0; + posBufferUpto = 0; + lastDocID = 0; + docCount = 0; + } + + @Override + public void encodeTerm(DataOutput out, FieldInfo fieldInfo, BlockTermState _state, boolean absolute) throws IOException { + IntBlockTermState state = (IntBlockTermState) _state; + if (absolute) { + lastState = emptyState; + assert lastState.docStartFP == 0; + } + + if (lastState.singletonDocID != -1 && state.singletonDocID != -1 && state.docStartFP == lastState.docStartFP) { + // With runs of rare values such as ID fields, the increment of pointers in the docs file is + // often 0. 
+ // Furthermore some ID schemes like auto-increment IDs or Flake IDs are monotonic, so we + // encode the delta + // between consecutive doc IDs to save space. + final long delta = (long) state.singletonDocID - lastState.singletonDocID; + out.writeVLong((BitUtil.zigZagEncode(delta) << 1) | 0x01); + } else { + out.writeVLong((state.docStartFP - lastState.docStartFP) << 1); + if (state.singletonDocID != -1) { + out.writeVInt(state.singletonDocID); + } + } + + if (writePositions) { + out.writeVLong(state.posStartFP - lastState.posStartFP); + if (writePayloads || writeOffsets) { + out.writeVLong(state.payStartFP - lastState.payStartFP); + } + } + if (writePositions) { + if (state.lastPosBlockOffset != -1) { + out.writeVLong(state.lastPosBlockOffset); + } + } + if (state.skipOffset != -1) { + out.writeVLong(state.skipOffset); + } + lastState = state; + } + + @Override + public void close() throws IOException { + // TODO: add a finish() at least to PushBase? DV too...? + boolean success = false; + try { + if (docOut != null) { + CodecUtil.writeFooter(docOut); + } + if (posOut != null) { + CodecUtil.writeFooter(posOut); + } + if (payOut != null) { + CodecUtil.writeFooter(payOut); + } + success = true; + } finally { + if (success) { + IOUtils.close(docOut, posOut, payOut); + } else { + IOUtils.closeWhileHandlingException(docOut, posOut, payOut); + } + docOut = posOut = payOut = null; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812ScoreSkipReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812ScoreSkipReader.java new file mode 100644 index 0000000000000..f76e1026945e6 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812ScoreSkipReader.java @@ -0,0 +1,157 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. 
+ */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.index.Impact; +import org.apache.lucene.index.Impacts; +import org.apache.lucene.store.ByteArrayDataInput; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.util.ArrayUtil; + +import java.io.IOException; +import java.util.AbstractList; +import java.util.Arrays; +import java.util.List; +import java.util.RandomAccess; + +final class ES812ScoreSkipReader extends ES812SkipReader { + + private final byte[][] impactData; + private final int[] impactDataLength; + private final ByteArrayDataInput badi = new ByteArrayDataInput(); + private final Impacts impacts; + private int numLevels = 1; + private final MutableImpactList[] perLevelImpacts; + + ES812ScoreSkipReader(IndexInput skipStream, int maxSkipLevels, boolean hasPos, boolean hasOffsets, boolean hasPayloads) { + super(skipStream, maxSkipLevels, hasPos, hasOffsets, hasPayloads); + this.impactData = new byte[maxSkipLevels][]; + Arrays.fill(impactData, new byte[0]); + this.impactDataLength = new int[maxSkipLevels]; + this.perLevelImpacts = new MutableImpactList[maxSkipLevels]; + for (int i = 0; i < perLevelImpacts.length; ++i) { + perLevelImpacts[i] = new MutableImpactList(); + } + impacts = new Impacts() { + + @Override + public int numLevels() { + return numLevels; + } + + @Override + public int getDocIdUpTo(int level) { + return skipDoc[level]; + } + + @Override + public List getImpacts(int level) { + assert level < numLevels; + if (impactDataLength[level] > 0) { + badi.reset(impactData[level], 0, impactDataLength[level]); + perLevelImpacts[level] = readImpacts(badi, perLevelImpacts[level]); + impactDataLength[level] = 0; + } + return perLevelImpacts[level]; + } + }; + } + + @Override + public int skipTo(int target) throws IOException { + int result = super.skipTo(target); + if (numberOfSkipLevels > 0) { + numLevels = numberOfSkipLevels; + } else { + // End of postings don't have skip data anymore, so we fill with dummy data + // like SlowImpactsEnum. 
+ numLevels = 1; + perLevelImpacts[0].length = 1; + perLevelImpacts[0].impacts[0].freq = Integer.MAX_VALUE; + perLevelImpacts[0].impacts[0].norm = 1L; + impactDataLength[0] = 0; + } + return result; + } + + Impacts getImpacts() { + return impacts; + } + + @Override + protected void readImpacts(int level, IndexInput skipStream) throws IOException { + int length = skipStream.readVInt(); + if (impactData[level].length < length) { + impactData[level] = new byte[ArrayUtil.oversize(length, Byte.BYTES)]; + } + skipStream.readBytes(impactData[level], 0, length); + impactDataLength[level] = length; + } + + static MutableImpactList readImpacts(ByteArrayDataInput in, MutableImpactList reuse) { + int maxNumImpacts = in.length(); // at most one impact per byte + if (reuse.impacts.length < maxNumImpacts) { + int oldLength = reuse.impacts.length; + reuse.impacts = ArrayUtil.grow(reuse.impacts, maxNumImpacts); + for (int i = oldLength; i < reuse.impacts.length; ++i) { + reuse.impacts[i] = new Impact(Integer.MAX_VALUE, 1L); + } + } + + int freq = 0; + long norm = 0; + int length = 0; + while (in.getPosition() < in.length()) { + int freqDelta = in.readVInt(); + if ((freqDelta & 0x01) != 0) { + freq += 1 + (freqDelta >>> 1); + try { + norm += 1 + in.readZLong(); + } catch (IOException e) { + throw new RuntimeException(e); // cannot happen on a BADI + } + } else { + freq += 1 + (freqDelta >>> 1); + norm++; + } + Impact impact = reuse.impacts[length]; + impact.freq = freq; + impact.norm = norm; + length++; + } + reuse.length = length; + return reuse; + } + + static class MutableImpactList extends AbstractList implements RandomAccess { + int length = 1; + Impact[] impacts = new Impact[] { new Impact(Integer.MAX_VALUE, 1L) }; + + @Override + public Impact get(int index) { + return impacts[index]; + } + + @Override + public int size() { + return length; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java new file mode 100644 index 0000000000000..11c0c611312fc --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipReader.java @@ -0,0 +1,203 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.MultiLevelSkipListReader; +import org.apache.lucene.store.IndexInput; + +import java.io.IOException; +import java.util.Arrays; + +/** + * Implements the skip list reader for block postings format that stores positions and payloads. + * + *
<p>
Although this skipper uses MultiLevelSkipListReader as an interface, its definition of a skip + * position is a little different. + * + *
<p>
For example, when skipInterval = blockSize = 3, df = 2*skipInterval = 6, + * + *
<pre>
+ * 0 1 2 3 4 5
+ * d d d d d d    (posting list)
+ *     ^     ^    (skip point in MultiLevelSkipWriter)
+ *       ^        (skip point in Lucene90SkipWriter)
+ * </pre>
+ * + *
<p>
In this case, MultiLevelSkipListReader will use the last document as a skip point, while + * Lucene90SkipReader should assume that no skip point will come. + * + *
<p>
If we use the interface directly in Lucene90SkipReader, it may naively try to read more skip + * data after the only skip point is loaded. + * + *
<p>
To illustrate this, suppose we call skipTo(d[5]): since skip point d[3] has a smaller docId and + * numSkipped+blockSize == df, the MultiLevelSkipListReader will assume the skip list isn't exhausted + * yet and try to load a non-existent skip point. + * + *
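<p>(Added illustration, not part of the original Lucene comment: with the example's blockSize = 3 and + * df = 6, trim(6) returns 5 because 6 % 3 == 0, so the trimmed df hides the phantom skip point and + * MultiLevelSkipListReader reads exactly one skip entry. In the real format, blockSize is 128.) + * + *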
<p>
Therefore, we'll trim df before passing it to the interface. see trim(int) + */ +class ES812SkipReader extends MultiLevelSkipListReader { + private long[] docPointer; + private long[] posPointer; + private long[] payPointer; + private int[] posBufferUpto; + private int[] payloadByteUpto; + + private long lastPosPointer; + private long lastPayPointer; + private int lastPayloadByteUpto; + private long lastDocPointer; + private int lastPosBufferUpto; + + ES812SkipReader(IndexInput skipStream, int maxSkipLevels, boolean hasPos, boolean hasOffsets, boolean hasPayloads) { + super(skipStream, maxSkipLevels, ForUtil.BLOCK_SIZE, 8); + docPointer = new long[maxSkipLevels]; + if (hasPos) { + posPointer = new long[maxSkipLevels]; + posBufferUpto = new int[maxSkipLevels]; + if (hasPayloads) { + payloadByteUpto = new int[maxSkipLevels]; + } else { + payloadByteUpto = null; + } + if (hasOffsets || hasPayloads) { + payPointer = new long[maxSkipLevels]; + } else { + payPointer = null; + } + } else { + posPointer = null; + } + } + + /** + * Trim original docFreq to tell skipReader read proper number of skip points. + * + *
<p>
Since our definition in Lucene90Skip* is a little different from MultiLevelSkip* This + * trimmed docFreq will prevent skipReader from: 1. silly reading a non-existed skip point after + * the last block boundary 2. moving into the vInt block + */ + protected int trim(int df) { + return df % ForUtil.BLOCK_SIZE == 0 ? df - 1 : df; + } + + public void init(long skipPointer, long docBasePointer, long posBasePointer, long payBasePointer, int df) throws IOException { + super.init(skipPointer, trim(df)); + lastDocPointer = docBasePointer; + lastPosPointer = posBasePointer; + lastPayPointer = payBasePointer; + + Arrays.fill(docPointer, docBasePointer); + if (posPointer != null) { + Arrays.fill(posPointer, posBasePointer); + if (payPointer != null) { + Arrays.fill(payPointer, payBasePointer); + } + } else { + assert posBasePointer == 0; + } + } + + /** + * Returns the doc pointer of the doc to which the last call of {@link + * MultiLevelSkipListReader#skipTo(int)} has skipped. + */ + public long getDocPointer() { + return lastDocPointer; + } + + public long getPosPointer() { + return lastPosPointer; + } + + public int getPosBufferUpto() { + return lastPosBufferUpto; + } + + public long getPayPointer() { + return lastPayPointer; + } + + public int getPayloadByteUpto() { + return lastPayloadByteUpto; + } + + public int getNextSkipDoc() { + return skipDoc[0]; + } + + @Override + protected void seekChild(int level) throws IOException { + super.seekChild(level); + docPointer[level] = lastDocPointer; + if (posPointer != null) { + posPointer[level] = lastPosPointer; + posBufferUpto[level] = lastPosBufferUpto; + if (payloadByteUpto != null) { + payloadByteUpto[level] = lastPayloadByteUpto; + } + if (payPointer != null) { + payPointer[level] = lastPayPointer; + } + } + } + + @Override + protected void setLastSkipData(int level) { + super.setLastSkipData(level); + lastDocPointer = docPointer[level]; + + if (posPointer != null) { + lastPosPointer = posPointer[level]; + lastPosBufferUpto = posBufferUpto[level]; + if (payPointer != null) { + lastPayPointer = payPointer[level]; + } + if (payloadByteUpto != null) { + lastPayloadByteUpto = payloadByteUpto[level]; + } + } + } + + @Override + protected int readSkipData(int level, IndexInput skipStream) throws IOException { + int delta = skipStream.readVInt(); + docPointer[level] += skipStream.readVLong(); + + if (posPointer != null) { + posPointer[level] += skipStream.readVLong(); + posBufferUpto[level] = skipStream.readVInt(); + + if (payloadByteUpto != null) { + payloadByteUpto[level] = skipStream.readVInt(); + } + + if (payPointer != null) { + payPointer[level] += skipStream.readVLong(); + } + } + readImpacts(level, skipStream); + return delta; + } + + // The default impl skips impacts + protected void readImpacts(int level, IndexInput skipStream) throws IOException { + skipStream.skipBytes(skipStream.readVInt()); + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipWriter.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipWriter.java new file mode 100644 index 0000000000000..dbfb7c86a1475 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ES812SkipWriter.java @@ -0,0 +1,229 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ + +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.CompetitiveImpactAccumulator; +import org.apache.lucene.codecs.MultiLevelSkipListWriter; +import org.apache.lucene.index.Impact; +import org.apache.lucene.store.ByteBuffersDataOutput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.store.IndexOutput; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collection; + +/** + * Write skip lists with multiple levels, and support skip within block ints. + * + *
<p>
Assume that docFreq = 28, skipInterval = blockSize = 12 + * + *
<pre>
+ *  |       block#0       | |      block#1        | |vInts|
+ *  d d d d d d d d d d d d d d d d d d d d d d d d d d d d (posting list)
+ *                          ^                       ^       (level 0 skip point)
+ * </pre>
+ * + *
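<p>(Worked example, added for clarity and not part of the original comment: with docFreq = 28 and + * blockSize = 12 as above, level-0 skip entries are buffered after the 12th and the 24th document; + * the trailing 4 documents fall into the vInts tail and carry no skip data.) + * + *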
<p>
Note that skipWriter will ignore the first document in block#0, since it is useless as a skip + * point. Also, we'll never skip into the vInts block; we only record skip data at its start + * point (if it exists). + * + *
<p>
For each skip point, we will record: 1. docID in former position, i.e. for position 12, record + * docID[11], etc. 2. its related file points(position, payload), 3. related numbers or + * uptos(position, payload). 4. start offset. + */ +final class ES812SkipWriter extends MultiLevelSkipListWriter { + private int[] lastSkipDoc; + private long[] lastSkipDocPointer; + private long[] lastSkipPosPointer; + private long[] lastSkipPayPointer; + + private final IndexOutput docOut; + private final IndexOutput posOut; + private final IndexOutput payOut; + + private int curDoc; + private long curDocPointer; + private long curPosPointer; + private long curPayPointer; + private int curPosBufferUpto; + private int curPayloadByteUpto; + private CompetitiveImpactAccumulator[] curCompetitiveFreqNorms; + private boolean fieldHasPositions; + private boolean fieldHasOffsets; + private boolean fieldHasPayloads; + + ES812SkipWriter(int maxSkipLevels, int blockSize, int docCount, IndexOutput docOut, IndexOutput posOut, IndexOutput payOut) { + super(blockSize, 8, maxSkipLevels, docCount); + this.docOut = docOut; + this.posOut = posOut; + this.payOut = payOut; + + lastSkipDoc = new int[maxSkipLevels]; + lastSkipDocPointer = new long[maxSkipLevels]; + if (posOut != null) { + lastSkipPosPointer = new long[maxSkipLevels]; + if (payOut != null) { + lastSkipPayPointer = new long[maxSkipLevels]; + } + } + curCompetitiveFreqNorms = new CompetitiveImpactAccumulator[maxSkipLevels]; + for (int i = 0; i < maxSkipLevels; ++i) { + curCompetitiveFreqNorms[i] = new CompetitiveImpactAccumulator(); + } + } + + void setField(boolean fieldHasPositions, boolean fieldHasOffsets, boolean fieldHasPayloads) { + this.fieldHasPositions = fieldHasPositions; + this.fieldHasOffsets = fieldHasOffsets; + this.fieldHasPayloads = fieldHasPayloads; + } + + // tricky: we only skip data for blocks (terms with more than 128 docs), but re-init'ing the + // skipper + // is pretty slow for rare terms in large segments as we have to fill O(log #docs in segment) of + // junk. + // this is the vast majority of terms (worst case: ID field or similar). so in resetSkip() we + // save + // away the previous pointers, and lazy-init only if we need to buffer skip data for the term. + private boolean initialized; + long lastDocFP; + long lastPosFP; + long lastPayFP; + + @Override + public void resetSkip() { + lastDocFP = docOut.getFilePointer(); + if (fieldHasPositions) { + lastPosFP = posOut.getFilePointer(); + if (fieldHasOffsets || fieldHasPayloads) { + lastPayFP = payOut.getFilePointer(); + } + } + if (initialized) { + for (CompetitiveImpactAccumulator acc : curCompetitiveFreqNorms) { + acc.clear(); + } + } + initialized = false; + } + + private void initSkip() { + if (initialized == false) { + super.resetSkip(); + Arrays.fill(lastSkipDoc, 0); + Arrays.fill(lastSkipDocPointer, lastDocFP); + if (fieldHasPositions) { + Arrays.fill(lastSkipPosPointer, lastPosFP); + if (fieldHasOffsets || fieldHasPayloads) { + Arrays.fill(lastSkipPayPointer, lastPayFP); + } + } + // sets of competitive freq,norm pairs should be empty at this point + assert Arrays.stream(curCompetitiveFreqNorms) + .map(CompetitiveImpactAccumulator::getCompetitiveFreqNormPairs) + .mapToInt(Collection::size) + .sum() == 0; + initialized = true; + } + } + + /** Sets the values for the current skip data. 
*/ + public void bufferSkip( + int doc, + CompetitiveImpactAccumulator competitiveFreqNorms, + int numDocs, + long posFP, + long payFP, + int posBufferUpto, + int payloadByteUpto + ) throws IOException { + initSkip(); + this.curDoc = doc; + this.curDocPointer = docOut.getFilePointer(); + this.curPosPointer = posFP; + this.curPayPointer = payFP; + this.curPosBufferUpto = posBufferUpto; + this.curPayloadByteUpto = payloadByteUpto; + this.curCompetitiveFreqNorms[0].addAll(competitiveFreqNorms); + bufferSkip(numDocs); + } + + private final ByteBuffersDataOutput freqNormOut = ByteBuffersDataOutput.newResettableInstance(); + + @Override + protected void writeSkipData(int level, DataOutput skipBuffer) throws IOException { + + int delta = curDoc - lastSkipDoc[level]; + + skipBuffer.writeVInt(delta); + lastSkipDoc[level] = curDoc; + + skipBuffer.writeVLong(curDocPointer - lastSkipDocPointer[level]); + lastSkipDocPointer[level] = curDocPointer; + + if (fieldHasPositions) { + + skipBuffer.writeVLong(curPosPointer - lastSkipPosPointer[level]); + lastSkipPosPointer[level] = curPosPointer; + skipBuffer.writeVInt(curPosBufferUpto); + + if (fieldHasPayloads) { + skipBuffer.writeVInt(curPayloadByteUpto); + } + + if (fieldHasOffsets || fieldHasPayloads) { + skipBuffer.writeVLong(curPayPointer - lastSkipPayPointer[level]); + lastSkipPayPointer[level] = curPayPointer; + } + } + + CompetitiveImpactAccumulator competitiveFreqNorms = curCompetitiveFreqNorms[level]; + assert competitiveFreqNorms.getCompetitiveFreqNormPairs().size() > 0; + if (level + 1 < numberOfSkipLevels) { + curCompetitiveFreqNorms[level + 1].addAll(competitiveFreqNorms); + } + writeImpacts(competitiveFreqNorms, freqNormOut); + skipBuffer.writeVInt(Math.toIntExact(freqNormOut.size())); + freqNormOut.copyTo(skipBuffer); + freqNormOut.reset(); + competitiveFreqNorms.clear(); + } + + static void writeImpacts(CompetitiveImpactAccumulator acc, DataOutput out) throws IOException { + Collection impacts = acc.getCompetitiveFreqNormPairs(); + Impact previous = new Impact(0, 0); + for (Impact impact : impacts) { + assert impact.freq > previous.freq; + assert Long.compareUnsigned(impact.norm, previous.norm) > 0; + int freqDelta = impact.freq - previous.freq - 1; + long normDelta = impact.norm - previous.norm - 1; + if (normDelta == 0) { + // most of time, norm only increases by 1, so we can fold everything in a single byte + out.writeVInt(freqDelta << 1); + } else { + out.writeVInt((freqDelta << 1) | 1); + out.writeZLong(normDelta); + } + previous = impact; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java new file mode 100644 index 0000000000000..d874caab1b8c0 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/ForUtil.java @@ -0,0 +1,1049 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; + +import java.io.IOException; + +// Inspired from https://fulmicoton.com/posts/bitpacking/ +// Encodes multiple integers in a long to get SIMD-like speedups. +// If bitsPerValue <= 8 then we pack 8 ints per long +// else if bitsPerValue <= 16 we pack 4 ints per long +// else we pack 2 ints per long +final class ForUtil { + + static final int BLOCK_SIZE = 128; + private static final int BLOCK_SIZE_LOG2 = 7; + + private static long expandMask32(long mask32) { + return mask32 | (mask32 << 32); + } + + private static long expandMask16(long mask16) { + return expandMask32(mask16 | (mask16 << 16)); + } + + private static long expandMask8(long mask8) { + return expandMask16(mask8 | (mask8 << 8)); + } + + private static long mask32(int bitsPerValue) { + return expandMask32((1L << bitsPerValue) - 1); + } + + private static long mask16(int bitsPerValue) { + return expandMask16((1L << bitsPerValue) - 1); + } + + private static long mask8(int bitsPerValue) { + return expandMask8((1L << bitsPerValue) - 1); + } + + private static void expand8(long[] arr) { + for (int i = 0; i < 16; ++i) { + long l = arr[i]; + arr[i] = (l >>> 56) & 0xFFL; + arr[16 + i] = (l >>> 48) & 0xFFL; + arr[32 + i] = (l >>> 40) & 0xFFL; + arr[48 + i] = (l >>> 32) & 0xFFL; + arr[64 + i] = (l >>> 24) & 0xFFL; + arr[80 + i] = (l >>> 16) & 0xFFL; + arr[96 + i] = (l >>> 8) & 0xFFL; + arr[112 + i] = l & 0xFFL; + } + } + + private static void expand8To32(long[] arr) { + for (int i = 0; i < 16; ++i) { + long l = arr[i]; + arr[i] = (l >>> 24) & 0x000000FF000000FFL; + arr[16 + i] = (l >>> 16) & 0x000000FF000000FFL; + arr[32 + i] = (l >>> 8) & 0x000000FF000000FFL; + arr[48 + i] = l & 0x000000FF000000FFL; + } + } + + private static void collapse8(long[] arr) { + for (int i = 0; i < 16; ++i) { + arr[i] = (arr[i] << 56) | (arr[16 + i] << 48) | (arr[32 + i] << 40) | (arr[48 + i] << 32) | (arr[64 + i] << 24) | (arr[80 + i] + << 16) | (arr[96 + i] << 8) | arr[112 + i]; + } + } + + private static void expand16(long[] arr) { + for (int i = 0; i < 32; ++i) { + long l = arr[i]; + arr[i] = (l >>> 48) & 0xFFFFL; + arr[32 + i] = (l >>> 32) & 0xFFFFL; + arr[64 + i] = (l >>> 16) & 0xFFFFL; + arr[96 + i] = l & 0xFFFFL; + } + } + + private static void expand16To32(long[] arr) { + for (int i = 0; i < 32; ++i) { + long l = arr[i]; + arr[i] = (l >>> 16) & 0x0000FFFF0000FFFFL; + arr[32 + i] = l & 0x0000FFFF0000FFFFL; + } + } + + private static void collapse16(long[] arr) { + for (int i = 0; i < 32; ++i) { + arr[i] = (arr[i] << 48) | (arr[32 + i] << 32) | (arr[64 + i] << 16) | arr[96 + i]; + } + } + + private static void expand32(long[] arr) { + for (int i = 0; i < 64; ++i) { + long l = arr[i]; + arr[i] = l >>> 32; + arr[64 + i] = l & 0xFFFFFFFFL; + } + } + + private static void collapse32(long[] arr) { + for (int i = 0; i < 64; ++i) { + arr[i] = (arr[i] << 32) | arr[64 + i]; + } + } + + private final long[] tmp = new long[BLOCK_SIZE / 2]; + + /** 
Encode 128 integers from {@code longs} into {@code out}. */ + void encode(long[] longs, int bitsPerValue, DataOutput out) throws IOException { + final int nextPrimitive; + final int numLongs; + if (bitsPerValue <= 8) { + nextPrimitive = 8; + numLongs = BLOCK_SIZE / 8; + collapse8(longs); + } else if (bitsPerValue <= 16) { + nextPrimitive = 16; + numLongs = BLOCK_SIZE / 4; + collapse16(longs); + } else { + nextPrimitive = 32; + numLongs = BLOCK_SIZE / 2; + collapse32(longs); + } + + final int numLongsPerShift = bitsPerValue * 2; + int idx = 0; + int shift = nextPrimitive - bitsPerValue; + for (int i = 0; i < numLongsPerShift; ++i) { + tmp[i] = longs[idx++] << shift; + } + for (shift = shift - bitsPerValue; shift >= 0; shift -= bitsPerValue) { + for (int i = 0; i < numLongsPerShift; ++i) { + tmp[i] |= longs[idx++] << shift; + } + } + + final int remainingBitsPerLong = shift + bitsPerValue; + final long maskRemainingBitsPerLong; + if (nextPrimitive == 8) { + maskRemainingBitsPerLong = MASKS8[remainingBitsPerLong]; + } else if (nextPrimitive == 16) { + maskRemainingBitsPerLong = MASKS16[remainingBitsPerLong]; + } else { + maskRemainingBitsPerLong = MASKS32[remainingBitsPerLong]; + } + + int tmpIdx = 0; + int remainingBitsPerValue = bitsPerValue; + while (idx < numLongs) { + if (remainingBitsPerValue >= remainingBitsPerLong) { + remainingBitsPerValue -= remainingBitsPerLong; + tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & maskRemainingBitsPerLong; + if (remainingBitsPerValue == 0) { + idx++; + remainingBitsPerValue = bitsPerValue; + } + } else { + final long mask1, mask2; + if (nextPrimitive == 8) { + mask1 = MASKS8[remainingBitsPerValue]; + mask2 = MASKS8[remainingBitsPerLong - remainingBitsPerValue]; + } else if (nextPrimitive == 16) { + mask1 = MASKS16[remainingBitsPerValue]; + mask2 = MASKS16[remainingBitsPerLong - remainingBitsPerValue]; + } else { + mask1 = MASKS32[remainingBitsPerValue]; + mask2 = MASKS32[remainingBitsPerLong - remainingBitsPerValue]; + } + tmp[tmpIdx] |= (longs[idx++] & mask1) << (remainingBitsPerLong - remainingBitsPerValue); + remainingBitsPerValue = bitsPerValue - remainingBitsPerLong + remainingBitsPerValue; + tmp[tmpIdx++] |= (longs[idx] >>> remainingBitsPerValue) & mask2; + } + } + + for (int i = 0; i < numLongsPerShift; ++i) { + out.writeLong(tmp[i]); + } + } + + /** Number of bytes required to encode 128 integers of {@code bitsPerValue} bits per value. 
*/ + int numBytes(int bitsPerValue) { + return bitsPerValue << (BLOCK_SIZE_LOG2 - 3); + } + + private static void decodeSlow(int bitsPerValue, DataInput in, long[] tmp, long[] longs) throws IOException { + final int numLongs = bitsPerValue << 1; + in.readLongs(tmp, 0, numLongs); + final long mask = MASKS32[bitsPerValue]; + int longsIdx = 0; + int shift = 32 - bitsPerValue; + for (; shift >= 0; shift -= bitsPerValue) { + shiftLongs(tmp, numLongs, longs, longsIdx, shift, mask); + longsIdx += numLongs; + } + final int remainingBitsPerLong = shift + bitsPerValue; + final long mask32RemainingBitsPerLong = MASKS32[remainingBitsPerLong]; + int tmpIdx = 0; + int remainingBits = remainingBitsPerLong; + for (; longsIdx < BLOCK_SIZE / 2; ++longsIdx) { + int b = bitsPerValue - remainingBits; + long l = (tmp[tmpIdx++] & MASKS32[remainingBits]) << b; + while (b >= remainingBitsPerLong) { + b -= remainingBitsPerLong; + l |= (tmp[tmpIdx++] & mask32RemainingBitsPerLong) << b; + } + if (b > 0) { + l |= (tmp[tmpIdx] >>> (remainingBitsPerLong - b)) & MASKS32[b]; + remainingBits = remainingBitsPerLong - b; + } else { + remainingBits = remainingBitsPerLong; + } + longs[longsIdx] = l; + } + } + + /** + * The pattern that this shiftLongs method applies is recognized by the C2 compiler, which + * generates SIMD instructions for it in order to shift multiple longs at once. + */ + private static void shiftLongs(long[] a, int count, long[] b, int bi, int shift, long mask) { + for (int i = 0; i < count; ++i) { + b[bi + i] = (a[i] >>> shift) & mask; + } + } + + private static final long[] MASKS8 = new long[8]; + private static final long[] MASKS16 = new long[16]; + private static final long[] MASKS32 = new long[32]; + + static { + for (int i = 0; i < 8; ++i) { + MASKS8[i] = mask8(i); + } + for (int i = 0; i < 16; ++i) { + MASKS16[i] = mask16(i); + } + for (int i = 0; i < 32; ++i) { + MASKS32[i] = mask32(i); + } + } + + // mark values in array as final longs to avoid the cost of reading array, arrays should only be + // used when the idx is a variable + private static final long MASK8_1 = MASKS8[1]; + private static final long MASK8_2 = MASKS8[2]; + private static final long MASK8_3 = MASKS8[3]; + private static final long MASK8_4 = MASKS8[4]; + private static final long MASK8_5 = MASKS8[5]; + private static final long MASK8_6 = MASKS8[6]; + private static final long MASK8_7 = MASKS8[7]; + private static final long MASK16_1 = MASKS16[1]; + private static final long MASK16_2 = MASKS16[2]; + private static final long MASK16_3 = MASKS16[3]; + private static final long MASK16_4 = MASKS16[4]; + private static final long MASK16_5 = MASKS16[5]; + private static final long MASK16_6 = MASKS16[6]; + private static final long MASK16_7 = MASKS16[7]; + private static final long MASK16_9 = MASKS16[9]; + private static final long MASK16_10 = MASKS16[10]; + private static final long MASK16_11 = MASKS16[11]; + private static final long MASK16_12 = MASKS16[12]; + private static final long MASK16_13 = MASKS16[13]; + private static final long MASK16_14 = MASKS16[14]; + private static final long MASK16_15 = MASKS16[15]; + private static final long MASK32_1 = MASKS32[1]; + private static final long MASK32_2 = MASKS32[2]; + private static final long MASK32_3 = MASKS32[3]; + private static final long MASK32_4 = MASKS32[4]; + private static final long MASK32_5 = MASKS32[5]; + private static final long MASK32_6 = MASKS32[6]; + private static final long MASK32_7 = MASKS32[7]; + private static final long MASK32_8 = MASKS32[8]; + private static 
final long MASK32_9 = MASKS32[9]; + private static final long MASK32_10 = MASKS32[10]; + private static final long MASK32_11 = MASKS32[11]; + private static final long MASK32_12 = MASKS32[12]; + private static final long MASK32_13 = MASKS32[13]; + private static final long MASK32_14 = MASKS32[14]; + private static final long MASK32_15 = MASKS32[15]; + private static final long MASK32_17 = MASKS32[17]; + private static final long MASK32_18 = MASKS32[18]; + private static final long MASK32_19 = MASKS32[19]; + private static final long MASK32_20 = MASKS32[20]; + private static final long MASK32_21 = MASKS32[21]; + private static final long MASK32_22 = MASKS32[22]; + private static final long MASK32_23 = MASKS32[23]; + private static final long MASK32_24 = MASKS32[24]; + + /** Decode 128 integers into {@code longs}. */ + void decode(int bitsPerValue, DataInput in, long[] longs) throws IOException { + switch (bitsPerValue) { + case 1: + decode1(in, tmp, longs); + expand8(longs); + break; + case 2: + decode2(in, tmp, longs); + expand8(longs); + break; + case 3: + decode3(in, tmp, longs); + expand8(longs); + break; + case 4: + decode4(in, tmp, longs); + expand8(longs); + break; + case 5: + decode5(in, tmp, longs); + expand8(longs); + break; + case 6: + decode6(in, tmp, longs); + expand8(longs); + break; + case 7: + decode7(in, tmp, longs); + expand8(longs); + break; + case 8: + decode8(in, tmp, longs); + expand8(longs); + break; + case 9: + decode9(in, tmp, longs); + expand16(longs); + break; + case 10: + decode10(in, tmp, longs); + expand16(longs); + break; + case 11: + decode11(in, tmp, longs); + expand16(longs); + break; + case 12: + decode12(in, tmp, longs); + expand16(longs); + break; + case 13: + decode13(in, tmp, longs); + expand16(longs); + break; + case 14: + decode14(in, tmp, longs); + expand16(longs); + break; + case 15: + decode15(in, tmp, longs); + expand16(longs); + break; + case 16: + decode16(in, tmp, longs); + expand16(longs); + break; + case 17: + decode17(in, tmp, longs); + expand32(longs); + break; + case 18: + decode18(in, tmp, longs); + expand32(longs); + break; + case 19: + decode19(in, tmp, longs); + expand32(longs); + break; + case 20: + decode20(in, tmp, longs); + expand32(longs); + break; + case 21: + decode21(in, tmp, longs); + expand32(longs); + break; + case 22: + decode22(in, tmp, longs); + expand32(longs); + break; + case 23: + decode23(in, tmp, longs); + expand32(longs); + break; + case 24: + decode24(in, tmp, longs); + expand32(longs); + break; + default: + decodeSlow(bitsPerValue, in, tmp, longs); + expand32(longs); + break; + } + } + + /** + * Decodes 128 integers into 64 {@code longs} such that each long contains two values, each + * represented with 32 bits. Values [0..63] are encoded in the high-order bits of {@code longs} + * [0..63], and values [64..127] are encoded in the low-order bits of {@code longs} [0..63]. This + * representation may allow subsequent operations to be performed on two values at a time. 
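+ * For {@code bitsPerValue} in [17,24] (and the slow path) the raw decode already produces this packed form, so no expansion
+ * step is needed; smaller bit widths are unpacked from their 8- or 16-bit lanes via {@code expand8To32} / {@code expand16To32}.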
+ */ + void decodeTo32(int bitsPerValue, DataInput in, long[] longs) throws IOException { + switch (bitsPerValue) { + case 1: + decode1(in, tmp, longs); + expand8To32(longs); + break; + case 2: + decode2(in, tmp, longs); + expand8To32(longs); + break; + case 3: + decode3(in, tmp, longs); + expand8To32(longs); + break; + case 4: + decode4(in, tmp, longs); + expand8To32(longs); + break; + case 5: + decode5(in, tmp, longs); + expand8To32(longs); + break; + case 6: + decode6(in, tmp, longs); + expand8To32(longs); + break; + case 7: + decode7(in, tmp, longs); + expand8To32(longs); + break; + case 8: + decode8(in, tmp, longs); + expand8To32(longs); + break; + case 9: + decode9(in, tmp, longs); + expand16To32(longs); + break; + case 10: + decode10(in, tmp, longs); + expand16To32(longs); + break; + case 11: + decode11(in, tmp, longs); + expand16To32(longs); + break; + case 12: + decode12(in, tmp, longs); + expand16To32(longs); + break; + case 13: + decode13(in, tmp, longs); + expand16To32(longs); + break; + case 14: + decode14(in, tmp, longs); + expand16To32(longs); + break; + case 15: + decode15(in, tmp, longs); + expand16To32(longs); + break; + case 16: + decode16(in, tmp, longs); + expand16To32(longs); + break; + case 17: + decode17(in, tmp, longs); + break; + case 18: + decode18(in, tmp, longs); + break; + case 19: + decode19(in, tmp, longs); + break; + case 20: + decode20(in, tmp, longs); + break; + case 21: + decode21(in, tmp, longs); + break; + case 22: + decode22(in, tmp, longs); + break; + case 23: + decode23(in, tmp, longs); + break; + case 24: + decode24(in, tmp, longs); + break; + default: + decodeSlow(bitsPerValue, in, tmp, longs); + break; + } + } + + private static void decode1(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 2); + shiftLongs(tmp, 2, longs, 0, 7, MASK8_1); + shiftLongs(tmp, 2, longs, 2, 6, MASK8_1); + shiftLongs(tmp, 2, longs, 4, 5, MASK8_1); + shiftLongs(tmp, 2, longs, 6, 4, MASK8_1); + shiftLongs(tmp, 2, longs, 8, 3, MASK8_1); + shiftLongs(tmp, 2, longs, 10, 2, MASK8_1); + shiftLongs(tmp, 2, longs, 12, 1, MASK8_1); + shiftLongs(tmp, 2, longs, 14, 0, MASK8_1); + } + + private static void decode2(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 4); + shiftLongs(tmp, 4, longs, 0, 6, MASK8_2); + shiftLongs(tmp, 4, longs, 4, 4, MASK8_2); + shiftLongs(tmp, 4, longs, 8, 2, MASK8_2); + shiftLongs(tmp, 4, longs, 12, 0, MASK8_2); + } + + private static void decode3(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 6); + shiftLongs(tmp, 6, longs, 0, 5, MASK8_3); + shiftLongs(tmp, 6, longs, 6, 2, MASK8_3); + for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 2; ++iter, tmpIdx += 3, longsIdx += 2) { + long l0 = (tmp[tmpIdx + 0] & MASK8_2) << 1; + l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK8_1; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 2; + l1 |= (tmp[tmpIdx + 2] & MASK8_2) << 0; + longs[longsIdx + 1] = l1; + } + } + + private static void decode4(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 8); + shiftLongs(tmp, 8, longs, 0, 4, MASK8_4); + shiftLongs(tmp, 8, longs, 8, 0, MASK8_4); + } + + private static void decode5(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 10); + shiftLongs(tmp, 10, longs, 0, 3, MASK8_5); + for (int iter = 0, tmpIdx = 0, longsIdx = 10; iter < 2; ++iter, tmpIdx += 5, longsIdx += 3) { + long l0 = (tmp[tmpIdx + 0] & MASK8_3) << 2; + l0 |= (tmp[tmpIdx + 1] 
>>> 1) & MASK8_2; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK8_1) << 4; + l1 |= (tmp[tmpIdx + 2] & MASK8_3) << 1; + l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK8_1; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 3] & MASK8_2) << 3; + l2 |= (tmp[tmpIdx + 4] & MASK8_3) << 0; + longs[longsIdx + 2] = l2; + } + } + + private static void decode6(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 12); + shiftLongs(tmp, 12, longs, 0, 2, MASK8_6); + shiftLongs(tmp, 12, tmp, 0, 0, MASK8_2); + for (int iter = 0, tmpIdx = 0, longsIdx = 12; iter < 4; ++iter, tmpIdx += 3, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 4; + l0 |= tmp[tmpIdx + 1] << 2; + l0 |= tmp[tmpIdx + 2] << 0; + longs[longsIdx + 0] = l0; + } + } + + private static void decode7(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 14); + shiftLongs(tmp, 14, longs, 0, 1, MASK8_7); + shiftLongs(tmp, 14, tmp, 0, 0, MASK8_1); + for (int iter = 0, tmpIdx = 0, longsIdx = 14; iter < 2; ++iter, tmpIdx += 7, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 6; + l0 |= tmp[tmpIdx + 1] << 5; + l0 |= tmp[tmpIdx + 2] << 4; + l0 |= tmp[tmpIdx + 3] << 3; + l0 |= tmp[tmpIdx + 4] << 2; + l0 |= tmp[tmpIdx + 5] << 1; + l0 |= tmp[tmpIdx + 6] << 0; + longs[longsIdx + 0] = l0; + } + } + + private static void decode8(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(longs, 0, 16); + } + + private static void decode9(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 18); + shiftLongs(tmp, 18, longs, 0, 7, MASK16_9); + for (int iter = 0, tmpIdx = 0, longsIdx = 18; iter < 2; ++iter, tmpIdx += 9, longsIdx += 7) { + long l0 = (tmp[tmpIdx + 0] & MASK16_7) << 2; + l0 |= (tmp[tmpIdx + 1] >>> 5) & MASK16_2; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK16_5) << 4; + l1 |= (tmp[tmpIdx + 2] >>> 3) & MASK16_4; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 2] & MASK16_3) << 6; + l2 |= (tmp[tmpIdx + 3] >>> 1) & MASK16_6; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 3] & MASK16_1) << 8; + l3 |= (tmp[tmpIdx + 4] & MASK16_7) << 1; + l3 |= (tmp[tmpIdx + 5] >>> 6) & MASK16_1; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 5] & MASK16_6) << 3; + l4 |= (tmp[tmpIdx + 6] >>> 4) & MASK16_3; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 6] & MASK16_4) << 5; + l5 |= (tmp[tmpIdx + 7] >>> 2) & MASK16_5; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 7] & MASK16_2) << 7; + l6 |= (tmp[tmpIdx + 8] & MASK16_7) << 0; + longs[longsIdx + 6] = l6; + } + } + + private static void decode10(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 20); + shiftLongs(tmp, 20, longs, 0, 6, MASK16_10); + for (int iter = 0, tmpIdx = 0, longsIdx = 20; iter < 4; ++iter, tmpIdx += 5, longsIdx += 3) { + long l0 = (tmp[tmpIdx + 0] & MASK16_6) << 4; + l0 |= (tmp[tmpIdx + 1] >>> 2) & MASK16_4; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK16_2) << 8; + l1 |= (tmp[tmpIdx + 2] & MASK16_6) << 2; + l1 |= (tmp[tmpIdx + 3] >>> 4) & MASK16_2; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 3] & MASK16_4) << 6; + l2 |= (tmp[tmpIdx + 4] & MASK16_6) << 0; + longs[longsIdx + 2] = l2; + } + } + + private static void decode11(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 22); + shiftLongs(tmp, 22, longs, 0, 5, MASK16_11); + for (int iter = 0, tmpIdx = 0, longsIdx = 22; iter < 2; ++iter, tmpIdx += 11, longsIdx += 5) { + long l0 = (tmp[tmpIdx 
+ 0] & MASK16_5) << 6; + l0 |= (tmp[tmpIdx + 1] & MASK16_5) << 1; + l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK16_1; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 2] & MASK16_4) << 7; + l1 |= (tmp[tmpIdx + 3] & MASK16_5) << 2; + l1 |= (tmp[tmpIdx + 4] >>> 3) & MASK16_2; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 4] & MASK16_3) << 8; + l2 |= (tmp[tmpIdx + 5] & MASK16_5) << 3; + l2 |= (tmp[tmpIdx + 6] >>> 2) & MASK16_3; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 6] & MASK16_2) << 9; + l3 |= (tmp[tmpIdx + 7] & MASK16_5) << 4; + l3 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_4; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 8] & MASK16_1) << 10; + l4 |= (tmp[tmpIdx + 9] & MASK16_5) << 5; + l4 |= (tmp[tmpIdx + 10] & MASK16_5) << 0; + longs[longsIdx + 4] = l4; + } + } + + private static void decode12(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 24); + shiftLongs(tmp, 24, longs, 0, 4, MASK16_12); + shiftLongs(tmp, 24, tmp, 0, 0, MASK16_4); + for (int iter = 0, tmpIdx = 0, longsIdx = 24; iter < 8; ++iter, tmpIdx += 3, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 8; + l0 |= tmp[tmpIdx + 1] << 4; + l0 |= tmp[tmpIdx + 2] << 0; + longs[longsIdx + 0] = l0; + } + } + + private static void decode13(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 26); + shiftLongs(tmp, 26, longs, 0, 3, MASK16_13); + for (int iter = 0, tmpIdx = 0, longsIdx = 26; iter < 2; ++iter, tmpIdx += 13, longsIdx += 3) { + long l0 = (tmp[tmpIdx + 0] & MASK16_3) << 10; + l0 |= (tmp[tmpIdx + 1] & MASK16_3) << 7; + l0 |= (tmp[tmpIdx + 2] & MASK16_3) << 4; + l0 |= (tmp[tmpIdx + 3] & MASK16_3) << 1; + l0 |= (tmp[tmpIdx + 4] >>> 2) & MASK16_1; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 4] & MASK16_2) << 11; + l1 |= (tmp[tmpIdx + 5] & MASK16_3) << 8; + l1 |= (tmp[tmpIdx + 6] & MASK16_3) << 5; + l1 |= (tmp[tmpIdx + 7] & MASK16_3) << 2; + l1 |= (tmp[tmpIdx + 8] >>> 1) & MASK16_2; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 8] & MASK16_1) << 12; + l2 |= (tmp[tmpIdx + 9] & MASK16_3) << 9; + l2 |= (tmp[tmpIdx + 10] & MASK16_3) << 6; + l2 |= (tmp[tmpIdx + 11] & MASK16_3) << 3; + l2 |= (tmp[tmpIdx + 12] & MASK16_3) << 0; + longs[longsIdx + 2] = l2; + } + } + + private static void decode14(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 28); + shiftLongs(tmp, 28, longs, 0, 2, MASK16_14); + shiftLongs(tmp, 28, tmp, 0, 0, MASK16_2); + for (int iter = 0, tmpIdx = 0, longsIdx = 28; iter < 4; ++iter, tmpIdx += 7, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 12; + l0 |= tmp[tmpIdx + 1] << 10; + l0 |= tmp[tmpIdx + 2] << 8; + l0 |= tmp[tmpIdx + 3] << 6; + l0 |= tmp[tmpIdx + 4] << 4; + l0 |= tmp[tmpIdx + 5] << 2; + l0 |= tmp[tmpIdx + 6] << 0; + longs[longsIdx + 0] = l0; + } + } + + private static void decode15(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 30); + shiftLongs(tmp, 30, longs, 0, 1, MASK16_15); + shiftLongs(tmp, 30, tmp, 0, 0, MASK16_1); + for (int iter = 0, tmpIdx = 0, longsIdx = 30; iter < 2; ++iter, tmpIdx += 15, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 14; + l0 |= tmp[tmpIdx + 1] << 13; + l0 |= tmp[tmpIdx + 2] << 12; + l0 |= tmp[tmpIdx + 3] << 11; + l0 |= tmp[tmpIdx + 4] << 10; + l0 |= tmp[tmpIdx + 5] << 9; + l0 |= tmp[tmpIdx + 6] << 8; + l0 |= tmp[tmpIdx + 7] << 7; + l0 |= tmp[tmpIdx + 8] << 6; + l0 |= tmp[tmpIdx + 9] << 5; + l0 |= tmp[tmpIdx + 10] << 4; + l0 |= tmp[tmpIdx + 11] << 3; + l0 |= tmp[tmpIdx + 12] << 2; + l0 |= tmp[tmpIdx + 13] 
<< 1; + l0 |= tmp[tmpIdx + 14] << 0; + longs[longsIdx + 0] = l0; + } + } + + private static void decode16(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(longs, 0, 32); + } + + private static void decode17(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 34); + shiftLongs(tmp, 34, longs, 0, 15, MASK32_17); + for (int iter = 0, tmpIdx = 0, longsIdx = 34; iter < 2; ++iter, tmpIdx += 17, longsIdx += 15) { + long l0 = (tmp[tmpIdx + 0] & MASK32_15) << 2; + l0 |= (tmp[tmpIdx + 1] >>> 13) & MASK32_2; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK32_13) << 4; + l1 |= (tmp[tmpIdx + 2] >>> 11) & MASK32_4; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 2] & MASK32_11) << 6; + l2 |= (tmp[tmpIdx + 3] >>> 9) & MASK32_6; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 3] & MASK32_9) << 8; + l3 |= (tmp[tmpIdx + 4] >>> 7) & MASK32_8; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 4] & MASK32_7) << 10; + l4 |= (tmp[tmpIdx + 5] >>> 5) & MASK32_10; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 5] & MASK32_5) << 12; + l5 |= (tmp[tmpIdx + 6] >>> 3) & MASK32_12; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 6] & MASK32_3) << 14; + l6 |= (tmp[tmpIdx + 7] >>> 1) & MASK32_14; + longs[longsIdx + 6] = l6; + long l7 = (tmp[tmpIdx + 7] & MASK32_1) << 16; + l7 |= (tmp[tmpIdx + 8] & MASK32_15) << 1; + l7 |= (tmp[tmpIdx + 9] >>> 14) & MASK32_1; + longs[longsIdx + 7] = l7; + long l8 = (tmp[tmpIdx + 9] & MASK32_14) << 3; + l8 |= (tmp[tmpIdx + 10] >>> 12) & MASK32_3; + longs[longsIdx + 8] = l8; + long l9 = (tmp[tmpIdx + 10] & MASK32_12) << 5; + l9 |= (tmp[tmpIdx + 11] >>> 10) & MASK32_5; + longs[longsIdx + 9] = l9; + long l10 = (tmp[tmpIdx + 11] & MASK32_10) << 7; + l10 |= (tmp[tmpIdx + 12] >>> 8) & MASK32_7; + longs[longsIdx + 10] = l10; + long l11 = (tmp[tmpIdx + 12] & MASK32_8) << 9; + l11 |= (tmp[tmpIdx + 13] >>> 6) & MASK32_9; + longs[longsIdx + 11] = l11; + long l12 = (tmp[tmpIdx + 13] & MASK32_6) << 11; + l12 |= (tmp[tmpIdx + 14] >>> 4) & MASK32_11; + longs[longsIdx + 12] = l12; + long l13 = (tmp[tmpIdx + 14] & MASK32_4) << 13; + l13 |= (tmp[tmpIdx + 15] >>> 2) & MASK32_13; + longs[longsIdx + 13] = l13; + long l14 = (tmp[tmpIdx + 15] & MASK32_2) << 15; + l14 |= (tmp[tmpIdx + 16] & MASK32_15) << 0; + longs[longsIdx + 14] = l14; + } + } + + private static void decode18(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 36); + shiftLongs(tmp, 36, longs, 0, 14, MASK32_18); + for (int iter = 0, tmpIdx = 0, longsIdx = 36; iter < 4; ++iter, tmpIdx += 9, longsIdx += 7) { + long l0 = (tmp[tmpIdx + 0] & MASK32_14) << 4; + l0 |= (tmp[tmpIdx + 1] >>> 10) & MASK32_4; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK32_10) << 8; + l1 |= (tmp[tmpIdx + 2] >>> 6) & MASK32_8; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 2] & MASK32_6) << 12; + l2 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_12; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 3] & MASK32_2) << 16; + l3 |= (tmp[tmpIdx + 4] & MASK32_14) << 2; + l3 |= (tmp[tmpIdx + 5] >>> 12) & MASK32_2; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 5] & MASK32_12) << 6; + l4 |= (tmp[tmpIdx + 6] >>> 8) & MASK32_6; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 6] & MASK32_8) << 10; + l5 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_10; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 7] & MASK32_4) << 14; + l6 |= (tmp[tmpIdx + 8] & MASK32_14) << 0; + longs[longsIdx + 6] = l6; + } + } + + private static void 
decode19(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 38); + shiftLongs(tmp, 38, longs, 0, 13, MASK32_19); + for (int iter = 0, tmpIdx = 0, longsIdx = 38; iter < 2; ++iter, tmpIdx += 19, longsIdx += 13) { + long l0 = (tmp[tmpIdx + 0] & MASK32_13) << 6; + l0 |= (tmp[tmpIdx + 1] >>> 7) & MASK32_6; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK32_7) << 12; + l1 |= (tmp[tmpIdx + 2] >>> 1) & MASK32_12; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 2] & MASK32_1) << 18; + l2 |= (tmp[tmpIdx + 3] & MASK32_13) << 5; + l2 |= (tmp[tmpIdx + 4] >>> 8) & MASK32_5; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 4] & MASK32_8) << 11; + l3 |= (tmp[tmpIdx + 5] >>> 2) & MASK32_11; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 5] & MASK32_2) << 17; + l4 |= (tmp[tmpIdx + 6] & MASK32_13) << 4; + l4 |= (tmp[tmpIdx + 7] >>> 9) & MASK32_4; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 7] & MASK32_9) << 10; + l5 |= (tmp[tmpIdx + 8] >>> 3) & MASK32_10; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 8] & MASK32_3) << 16; + l6 |= (tmp[tmpIdx + 9] & MASK32_13) << 3; + l6 |= (tmp[tmpIdx + 10] >>> 10) & MASK32_3; + longs[longsIdx + 6] = l6; + long l7 = (tmp[tmpIdx + 10] & MASK32_10) << 9; + l7 |= (tmp[tmpIdx + 11] >>> 4) & MASK32_9; + longs[longsIdx + 7] = l7; + long l8 = (tmp[tmpIdx + 11] & MASK32_4) << 15; + l8 |= (tmp[tmpIdx + 12] & MASK32_13) << 2; + l8 |= (tmp[tmpIdx + 13] >>> 11) & MASK32_2; + longs[longsIdx + 8] = l8; + long l9 = (tmp[tmpIdx + 13] & MASK32_11) << 8; + l9 |= (tmp[tmpIdx + 14] >>> 5) & MASK32_8; + longs[longsIdx + 9] = l9; + long l10 = (tmp[tmpIdx + 14] & MASK32_5) << 14; + l10 |= (tmp[tmpIdx + 15] & MASK32_13) << 1; + l10 |= (tmp[tmpIdx + 16] >>> 12) & MASK32_1; + longs[longsIdx + 10] = l10; + long l11 = (tmp[tmpIdx + 16] & MASK32_12) << 7; + l11 |= (tmp[tmpIdx + 17] >>> 6) & MASK32_7; + longs[longsIdx + 11] = l11; + long l12 = (tmp[tmpIdx + 17] & MASK32_6) << 13; + l12 |= (tmp[tmpIdx + 18] & MASK32_13) << 0; + longs[longsIdx + 12] = l12; + } + } + + private static void decode20(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 40); + shiftLongs(tmp, 40, longs, 0, 12, MASK32_20); + for (int iter = 0, tmpIdx = 0, longsIdx = 40; iter < 8; ++iter, tmpIdx += 5, longsIdx += 3) { + long l0 = (tmp[tmpIdx + 0] & MASK32_12) << 8; + l0 |= (tmp[tmpIdx + 1] >>> 4) & MASK32_8; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK32_4) << 16; + l1 |= (tmp[tmpIdx + 2] & MASK32_12) << 4; + l1 |= (tmp[tmpIdx + 3] >>> 8) & MASK32_4; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 3] & MASK32_8) << 12; + l2 |= (tmp[tmpIdx + 4] & MASK32_12) << 0; + longs[longsIdx + 2] = l2; + } + } + + private static void decode21(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 42); + shiftLongs(tmp, 42, longs, 0, 11, MASK32_21); + for (int iter = 0, tmpIdx = 0, longsIdx = 42; iter < 2; ++iter, tmpIdx += 21, longsIdx += 11) { + long l0 = (tmp[tmpIdx + 0] & MASK32_11) << 10; + l0 |= (tmp[tmpIdx + 1] >>> 1) & MASK32_10; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 1] & MASK32_1) << 20; + l1 |= (tmp[tmpIdx + 2] & MASK32_11) << 9; + l1 |= (tmp[tmpIdx + 3] >>> 2) & MASK32_9; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 3] & MASK32_2) << 19; + l2 |= (tmp[tmpIdx + 4] & MASK32_11) << 8; + l2 |= (tmp[tmpIdx + 5] >>> 3) & MASK32_8; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 5] & MASK32_3) << 18; + l3 |= (tmp[tmpIdx + 6] & MASK32_11) << 7; 
+ l3 |= (tmp[tmpIdx + 7] >>> 4) & MASK32_7; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 7] & MASK32_4) << 17; + l4 |= (tmp[tmpIdx + 8] & MASK32_11) << 6; + l4 |= (tmp[tmpIdx + 9] >>> 5) & MASK32_6; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 9] & MASK32_5) << 16; + l5 |= (tmp[tmpIdx + 10] & MASK32_11) << 5; + l5 |= (tmp[tmpIdx + 11] >>> 6) & MASK32_5; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 11] & MASK32_6) << 15; + l6 |= (tmp[tmpIdx + 12] & MASK32_11) << 4; + l6 |= (tmp[tmpIdx + 13] >>> 7) & MASK32_4; + longs[longsIdx + 6] = l6; + long l7 = (tmp[tmpIdx + 13] & MASK32_7) << 14; + l7 |= (tmp[tmpIdx + 14] & MASK32_11) << 3; + l7 |= (tmp[tmpIdx + 15] >>> 8) & MASK32_3; + longs[longsIdx + 7] = l7; + long l8 = (tmp[tmpIdx + 15] & MASK32_8) << 13; + l8 |= (tmp[tmpIdx + 16] & MASK32_11) << 2; + l8 |= (tmp[tmpIdx + 17] >>> 9) & MASK32_2; + longs[longsIdx + 8] = l8; + long l9 = (tmp[tmpIdx + 17] & MASK32_9) << 12; + l9 |= (tmp[tmpIdx + 18] & MASK32_11) << 1; + l9 |= (tmp[tmpIdx + 19] >>> 10) & MASK32_1; + longs[longsIdx + 9] = l9; + long l10 = (tmp[tmpIdx + 19] & MASK32_10) << 11; + l10 |= (tmp[tmpIdx + 20] & MASK32_11) << 0; + longs[longsIdx + 10] = l10; + } + } + + private static void decode22(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 44); + shiftLongs(tmp, 44, longs, 0, 10, MASK32_22); + for (int iter = 0, tmpIdx = 0, longsIdx = 44; iter < 4; ++iter, tmpIdx += 11, longsIdx += 5) { + long l0 = (tmp[tmpIdx + 0] & MASK32_10) << 12; + l0 |= (tmp[tmpIdx + 1] & MASK32_10) << 2; + l0 |= (tmp[tmpIdx + 2] >>> 8) & MASK32_2; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 2] & MASK32_8) << 14; + l1 |= (tmp[tmpIdx + 3] & MASK32_10) << 4; + l1 |= (tmp[tmpIdx + 4] >>> 6) & MASK32_4; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 4] & MASK32_6) << 16; + l2 |= (tmp[tmpIdx + 5] & MASK32_10) << 6; + l2 |= (tmp[tmpIdx + 6] >>> 4) & MASK32_6; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 6] & MASK32_4) << 18; + l3 |= (tmp[tmpIdx + 7] & MASK32_10) << 8; + l3 |= (tmp[tmpIdx + 8] >>> 2) & MASK32_8; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 8] & MASK32_2) << 20; + l4 |= (tmp[tmpIdx + 9] & MASK32_10) << 10; + l4 |= (tmp[tmpIdx + 10] & MASK32_10) << 0; + longs[longsIdx + 4] = l4; + } + } + + private static void decode23(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 46); + shiftLongs(tmp, 46, longs, 0, 9, MASK32_23); + for (int iter = 0, tmpIdx = 0, longsIdx = 46; iter < 2; ++iter, tmpIdx += 23, longsIdx += 9) { + long l0 = (tmp[tmpIdx + 0] & MASK32_9) << 14; + l0 |= (tmp[tmpIdx + 1] & MASK32_9) << 5; + l0 |= (tmp[tmpIdx + 2] >>> 4) & MASK32_5; + longs[longsIdx + 0] = l0; + long l1 = (tmp[tmpIdx + 2] & MASK32_4) << 19; + l1 |= (tmp[tmpIdx + 3] & MASK32_9) << 10; + l1 |= (tmp[tmpIdx + 4] & MASK32_9) << 1; + l1 |= (tmp[tmpIdx + 5] >>> 8) & MASK32_1; + longs[longsIdx + 1] = l1; + long l2 = (tmp[tmpIdx + 5] & MASK32_8) << 15; + l2 |= (tmp[tmpIdx + 6] & MASK32_9) << 6; + l2 |= (tmp[tmpIdx + 7] >>> 3) & MASK32_6; + longs[longsIdx + 2] = l2; + long l3 = (tmp[tmpIdx + 7] & MASK32_3) << 20; + l3 |= (tmp[tmpIdx + 8] & MASK32_9) << 11; + l3 |= (tmp[tmpIdx + 9] & MASK32_9) << 2; + l3 |= (tmp[tmpIdx + 10] >>> 7) & MASK32_2; + longs[longsIdx + 3] = l3; + long l4 = (tmp[tmpIdx + 10] & MASK32_7) << 16; + l4 |= (tmp[tmpIdx + 11] & MASK32_9) << 7; + l4 |= (tmp[tmpIdx + 12] >>> 2) & MASK32_7; + longs[longsIdx + 4] = l4; + long l5 = (tmp[tmpIdx + 12] & MASK32_2) << 21; + l5 |= (tmp[tmpIdx + 13] & 
MASK32_9) << 12; + l5 |= (tmp[tmpIdx + 14] & MASK32_9) << 3; + l5 |= (tmp[tmpIdx + 15] >>> 6) & MASK32_3; + longs[longsIdx + 5] = l5; + long l6 = (tmp[tmpIdx + 15] & MASK32_6) << 17; + l6 |= (tmp[tmpIdx + 16] & MASK32_9) << 8; + l6 |= (tmp[tmpIdx + 17] >>> 1) & MASK32_8; + longs[longsIdx + 6] = l6; + long l7 = (tmp[tmpIdx + 17] & MASK32_1) << 22; + l7 |= (tmp[tmpIdx + 18] & MASK32_9) << 13; + l7 |= (tmp[tmpIdx + 19] & MASK32_9) << 4; + l7 |= (tmp[tmpIdx + 20] >>> 5) & MASK32_4; + longs[longsIdx + 7] = l7; + long l8 = (tmp[tmpIdx + 20] & MASK32_5) << 18; + l8 |= (tmp[tmpIdx + 21] & MASK32_9) << 9; + l8 |= (tmp[tmpIdx + 22] & MASK32_9) << 0; + longs[longsIdx + 8] = l8; + } + } + + private static void decode24(DataInput in, long[] tmp, long[] longs) throws IOException { + in.readLongs(tmp, 0, 48); + shiftLongs(tmp, 48, longs, 0, 8, MASK32_24); + shiftLongs(tmp, 48, tmp, 0, 0, MASK32_8); + for (int iter = 0, tmpIdx = 0, longsIdx = 48; iter < 16; ++iter, tmpIdx += 3, longsIdx += 1) { + long l0 = tmp[tmpIdx + 0] << 16; + l0 |= tmp[tmpIdx + 1] << 8; + l0 |= tmp[tmpIdx + 2] << 0; + longs[longsIdx + 0] = l0; + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java new file mode 100644 index 0000000000000..26a600c73eeb5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/codec/postings/PForUtil.java @@ -0,0 +1,323 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.store.DataInput; +import org.apache.lucene.store.DataOutput; +import org.apache.lucene.util.LongHeap; +import org.apache.lucene.util.packed.PackedInts; + +import java.io.IOException; +import java.util.Arrays; + +/** Utility class to encode sequences of 128 small positive integers. */ +final class PForUtil { + + private static final int MAX_EXCEPTIONS = 7; + private static final int HALF_BLOCK_SIZE = ForUtil.BLOCK_SIZE / 2; + + // IDENTITY_PLUS_ONE[i] == i + 1 + private static final long[] IDENTITY_PLUS_ONE = new long[ForUtil.BLOCK_SIZE]; + + static { + for (int i = 0; i < ForUtil.BLOCK_SIZE; ++i) { + IDENTITY_PLUS_ONE[i] = i + 1; + } + } + + static boolean allEqual(long[] l) { + for (int i = 1; i < ForUtil.BLOCK_SIZE; ++i) { + if (l[i] != l[0]) { + return false; + } + } + return true; + } + + private final ForUtil forUtil; + // buffer for reading exception data; each exception uses two bytes (pos + high-order bits of the + // exception) + private final byte[] exceptionBuff = new byte[MAX_EXCEPTIONS * 2]; + + PForUtil(ForUtil forUtil) { + assert ForUtil.BLOCK_SIZE <= 256 : "blocksize must fit in one byte. 
got " + ForUtil.BLOCK_SIZE; + this.forUtil = forUtil; + } + + /** Encode 128 integers from {@code longs} into {@code out}. */ + void encode(long[] longs, DataOutput out) throws IOException { + // Determine the top MAX_EXCEPTIONS + 1 values + final LongHeap top = new LongHeap(MAX_EXCEPTIONS + 1); + for (int i = 0; i <= MAX_EXCEPTIONS; ++i) { + top.push(longs[i]); + } + long topValue = top.top(); + for (int i = MAX_EXCEPTIONS + 1; i < ForUtil.BLOCK_SIZE; ++i) { + if (longs[i] > topValue) { + topValue = top.updateTop(longs[i]); + } + } + + long max = 0L; + for (int i = 1; i <= top.size(); ++i) { + max = Math.max(max, top.get(i)); + } + + final int maxBitsRequired = PackedInts.bitsRequired(max); + // We store the patch on a byte, so we can't decrease the number of bits required by more than 8 + final int patchedBitsRequired = Math.max(PackedInts.bitsRequired(topValue), maxBitsRequired - 8); + int numExceptions = 0; + final long maxUnpatchedValue = (1L << patchedBitsRequired) - 1; + for (int i = 2; i <= top.size(); ++i) { + if (top.get(i) > maxUnpatchedValue) { + numExceptions++; + } + } + final byte[] exceptions = new byte[numExceptions * 2]; + if (numExceptions > 0) { + int exceptionCount = 0; + for (int i = 0; i < ForUtil.BLOCK_SIZE; ++i) { + if (longs[i] > maxUnpatchedValue) { + exceptions[exceptionCount * 2] = (byte) i; + exceptions[exceptionCount * 2 + 1] = (byte) (longs[i] >>> patchedBitsRequired); + longs[i] &= maxUnpatchedValue; + exceptionCount++; + } + } + assert exceptionCount == numExceptions : exceptionCount + " " + numExceptions; + } + + if (allEqual(longs) && maxBitsRequired <= 8) { + for (int i = 0; i < numExceptions; ++i) { + exceptions[2 * i + 1] = (byte) (Byte.toUnsignedLong(exceptions[2 * i + 1]) << patchedBitsRequired); + } + out.writeByte((byte) (numExceptions << 5)); + out.writeVLong(longs[0]); + } else { + final int token = (numExceptions << 5) | patchedBitsRequired; + out.writeByte((byte) token); + forUtil.encode(longs, patchedBitsRequired, out); + } + out.writeBytes(exceptions, exceptions.length); + } + + /** Decode 128 integers into {@code longs}. */ + void decode(DataInput in, long[] longs) throws IOException { + final int token = Byte.toUnsignedInt(in.readByte()); + final int bitsPerValue = token & 0x1f; + final int numExceptions = token >>> 5; + if (bitsPerValue == 0) { + Arrays.fill(longs, 0, ForUtil.BLOCK_SIZE, in.readVLong()); + } else { + forUtil.decode(bitsPerValue, in, longs); + } + for (int i = 0; i < numExceptions; ++i) { + longs[Byte.toUnsignedInt(in.readByte())] |= Byte.toUnsignedLong(in.readByte()) << bitsPerValue; + } + } + + /** Decode deltas, compute the prefix sum and add {@code base} to all decoded longs. 
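+ * Deltas are decoded two-per-long (see {@code decodeTo32}) so the prefix sum can proceed on two lanes at once. A bpv of zero
+ * means all deltas are equal and skips decoding entirely, with the common all-ones case handled by {@code prefixSumOfOnes}.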
*/ + void decodeAndPrefixSum(DataInput in, long base, long[] longs) throws IOException { + final int token = Byte.toUnsignedInt(in.readByte()); + final int bitsPerValue = token & 0x1f; + final int numExceptions = token >>> 5; + if (numExceptions == 0) { + // when there are no exceptions to apply, we can be a bit more efficient with our decoding + if (bitsPerValue == 0) { + // a bpv of zero indicates all delta values are the same + long val = in.readVLong(); + if (val == 1) { + // this will often be the common case when working with doc IDs, so we special-case it to + // be slightly more efficient + prefixSumOfOnes(longs, base); + } else { + prefixSumOf(longs, base, val); + } + } else { + // decode the deltas then apply the prefix sum logic + forUtil.decodeTo32(bitsPerValue, in, longs); + prefixSum32(longs, base); + } + } else { + // pack two values per long so we can apply prefixes two-at-a-time + if (bitsPerValue == 0) { + fillSameValue32(longs, in.readVLong()); + } else { + forUtil.decodeTo32(bitsPerValue, in, longs); + } + applyExceptions32(bitsPerValue, numExceptions, in, longs); + prefixSum32(longs, base); + } + } + + /** Skip 128 integers. */ + void skip(DataInput in) throws IOException { + final int token = Byte.toUnsignedInt(in.readByte()); + final int bitsPerValue = token & 0x1f; + final int numExceptions = token >>> 5; + if (bitsPerValue == 0) { + in.readVLong(); + in.skipBytes((numExceptions << 1)); + } else { + in.skipBytes(forUtil.numBytes(bitsPerValue) + (numExceptions << 1)); + } + } + + /** + * Fill {@code longs} with the final values for the case of all deltas being 1. Note this assumes + * there are no exceptions to apply. + */ + private static void prefixSumOfOnes(long[] longs, long base) { + System.arraycopy(IDENTITY_PLUS_ONE, 0, longs, 0, ForUtil.BLOCK_SIZE); + // This loop gets auto-vectorized + for (int i = 0; i < ForUtil.BLOCK_SIZE; ++i) { + longs[i] += base; + } + } + + /** + * Fill {@code longs} with the final values for the case of all deltas being {@code val}. Note + * this assumes there are no exceptions to apply. + */ + private static void prefixSumOf(long[] longs, long base, long val) { + for (int i = 0; i < ForUtil.BLOCK_SIZE; i++) { + longs[i] = (i + 1) * val + base; + } + } + + /** + * Fills the {@code longs} with the provided {@code val}, packed two values per long (using 32 + * bits per value). + */ + private static void fillSameValue32(long[] longs, long val) { + final long token = val << 32 | val; + Arrays.fill(longs, 0, HALF_BLOCK_SIZE, token); + } + + /** Apply the exceptions where the values are packed two-per-long in {@code longs}. */ + private void applyExceptions32(int bitsPerValue, int numExceptions, DataInput in, long[] longs) throws IOException { + in.readBytes(exceptionBuff, 0, numExceptions * 2); + for (int i = 0; i < numExceptions; ++i) { + final int exceptionPos = Byte.toUnsignedInt(exceptionBuff[i * 2]); + final long exception = Byte.toUnsignedLong(exceptionBuff[i * 2 + 1]); + // note that we pack two values per long, so the index is [0..63] for 128 values + final int idx = exceptionPos & 0x3f; // mod 64 + // we need to shift by 1) the bpv, and 2) 32 for positions [0..63] (and no 32 shift for + // [64..127]) + final int shift = bitsPerValue + ((1 ^ (exceptionPos >>> 6)) << 5); + longs[idx] |= exception << shift; + } + } + + /** Apply prefix sum logic where the values are packed two-per-long in {@code longs}. 
*/ + private static void prefixSum32(long[] longs, long base) { + longs[0] += base << 32; + innerPrefixSum32(longs); + expand32(longs); + final long l = longs[HALF_BLOCK_SIZE - 1]; + for (int i = HALF_BLOCK_SIZE; i < ForUtil.BLOCK_SIZE; ++i) { + longs[i] += l; + } + } + + /** + * Expand the values packed two-per-long in {@code longs} into 128 individual long values stored + * back into {@code longs}. + */ + private static void expand32(long[] longs) { + for (int i = 0; i < 64; ++i) { + final long l = longs[i]; + longs[i] = l >>> 32; + longs[64 + i] = l & 0xFFFFFFFFL; + } + } + + /** + * Unrolled "inner" prefix sum logic where the values are packed two-per-long in {@code longs}. + * After this method, the final values will be correct for all high-order bits (values [0..63]) + * but a final prefix loop will still need to run to "correct" the values of [64..127] in the + * low-order bits, which need the 64th value added to all of them. + */ + private static void innerPrefixSum32(long[] longs) { + longs[1] += longs[0]; + longs[2] += longs[1]; + longs[3] += longs[2]; + longs[4] += longs[3]; + longs[5] += longs[4]; + longs[6] += longs[5]; + longs[7] += longs[6]; + longs[8] += longs[7]; + longs[9] += longs[8]; + longs[10] += longs[9]; + longs[11] += longs[10]; + longs[12] += longs[11]; + longs[13] += longs[12]; + longs[14] += longs[13]; + longs[15] += longs[14]; + longs[16] += longs[15]; + longs[17] += longs[16]; + longs[18] += longs[17]; + longs[19] += longs[18]; + longs[20] += longs[19]; + longs[21] += longs[20]; + longs[22] += longs[21]; + longs[23] += longs[22]; + longs[24] += longs[23]; + longs[25] += longs[24]; + longs[26] += longs[25]; + longs[27] += longs[26]; + longs[28] += longs[27]; + longs[29] += longs[28]; + longs[30] += longs[29]; + longs[31] += longs[30]; + longs[32] += longs[31]; + longs[33] += longs[32]; + longs[34] += longs[33]; + longs[35] += longs[34]; + longs[36] += longs[35]; + longs[37] += longs[36]; + longs[38] += longs[37]; + longs[39] += longs[38]; + longs[40] += longs[39]; + longs[41] += longs[40]; + longs[42] += longs[41]; + longs[43] += longs[42]; + longs[44] += longs[43]; + longs[45] += longs[44]; + longs[46] += longs[45]; + longs[47] += longs[46]; + longs[48] += longs[47]; + longs[49] += longs[48]; + longs[50] += longs[49]; + longs[51] += longs[50]; + longs[52] += longs[51]; + longs[53] += longs[52]; + longs[54] += longs[53]; + longs[55] += longs[54]; + longs[56] += longs[55]; + longs[57] += longs[56]; + longs[58] += longs[57]; + longs[59] += longs[58]; + longs[60] += longs[59]; + longs[61] += longs[60]; + longs[62] += longs[61]; + longs[63] += longs[62]; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java index 4994eccb31d04..58a3c02316430 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java +++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java @@ -248,13 +248,12 @@ public InternalEngine(EngineConfig engineConfig) { throttle = new IndexThrottle(); try { store.trimUnsafeCommits(config().getTranslogConfig().getTranslogPath()); - translog = openTranslog(engineConfig, translogDeletionPolicy, engineConfig.getGlobalCheckpointSupplier(), seqNo -> { - final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); - assert tracker != null || getTranslog().isOpen() == false; - if (tracker != null) { - tracker.markSeqNoAsPersisted(seqNo); - } - }); + translog = openTranslog( + engineConfig, + 
translogDeletionPolicy, + engineConfig.getGlobalCheckpointSupplier(), + translogPersistedSeqNoConsumer() + ); assert translog.getGeneration() != null; this.translog = translog; this.totalDiskSpace = new ByteSizeValue(Environment.getFileStore(translog.location()).getTotalSpace(), ByteSizeUnit.BYTES); @@ -346,6 +345,16 @@ private LocalCheckpointTracker createLocalCheckpointTracker( return localCheckpointTrackerSupplier.apply(maxSeqNo, localCheckpoint); } + protected LongConsumer translogPersistedSeqNoConsumer() { + return seqNo -> { + final LocalCheckpointTracker tracker = getLocalCheckpointTracker(); + assert tracker != null || getTranslog().isOpen() == false; + if (tracker != null) { + tracker.markSeqNoAsPersisted(seqNo); + } + }; + } + private SoftDeletesPolicy newSoftDeletesPolicy() throws IOException { final Map commitUserData = store.readLastCommittedSegmentsInfo().userData; final long lastMinRetainedSeqNo; @@ -2951,7 +2960,7 @@ public MergeStats getMergeStats() { return mergeScheduler.stats(); } - LocalCheckpointTracker getLocalCheckpointTracker() { + protected LocalCheckpointTracker getLocalCheckpointTracker() { return localCheckpointTracker; } diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java index ef0901bc17712..1cee2a90ec3f1 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMap.java @@ -64,11 +64,18 @@ public static final class VersionLookup { // Modifies the map of this instance by merging with the given VersionLookup public void merge(VersionLookup versionLookup) { + long existingEntriesSize = 0; + for (var entry : versionLookup.map.entrySet()) { + var existingValue = map.get(entry.getKey()); + existingEntriesSize += existingValue == null ? 0 : mapEntryBytesUsed(entry.getKey(), existingValue); + } map.putAll(versionLookup.map); + adjustRam(versionLookup.ramBytesUsed() - existingEntriesSize); minDeleteTimestamp.accumulateAndGet(versionLookup.minDeleteTimestamp(), Math::min); } - private VersionLookup(Map map) { + // Visible for testing + VersionLookup(Map map) { this.map = map; } @@ -77,7 +84,11 @@ public VersionValue get(BytesRef key) { } VersionValue put(BytesRef key, VersionValue value) { - return map.put(key, value); + long ramAccounting = mapEntryBytesUsed(key, value); + VersionValue previousValue = map.put(key, value); + ramAccounting += previousValue == null ? 
0 : -mapEntryBytesUsed(key, previousValue); + adjustRam(ramAccounting); + return previousValue; } public boolean isEmpty() { @@ -96,8 +107,12 @@ void markAsUnsafe() { unsafe = true; } - public VersionValue remove(BytesRef uid) { - return map.remove(uid); + VersionValue remove(BytesRef uid) { + VersionValue previousValue = map.remove(uid); + if (previousValue != null) { + adjustRam(-mapEntryBytesUsed(uid, previousValue)); + } + return previousValue; } public void updateMinDeletedTimestamp(DeleteVersionValue delete) { @@ -107,6 +122,26 @@ public void updateMinDeletedTimestamp(DeleteVersionValue delete) { public long minDeleteTimestamp() { return minDeleteTimestamp.get(); } + + void adjustRam(long value) { + if (value != 0) { + long v = ramBytesUsed.addAndGet(value); + assert v >= 0 : "bytes=" + v; + } + } + + public long ramBytesUsed() { + return ramBytesUsed.get(); + } + + public static long mapEntryBytesUsed(BytesRef key, VersionValue value) { + return (BASE_BYTES_PER_BYTESREF + key.bytes.length) + (BASE_BYTES_PER_CHM_ENTRY + value.ramBytesUsed()); + } + + // Used only for testing + Map getMap() { + return map; + } } private static final class Maps { @@ -170,27 +205,12 @@ Maps invalidateOldMap(LiveVersionMapArchive archive) { } void put(BytesRef uid, VersionValue version) { - long uidRAMBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; - long ramAccounting = BASE_BYTES_PER_CHM_ENTRY + version.ramBytesUsed() + uidRAMBytesUsed; - VersionValue previousValue = current.put(uid, version); - ramAccounting += previousValue == null ? 0 : -(BASE_BYTES_PER_CHM_ENTRY + previousValue.ramBytesUsed() + uidRAMBytesUsed); - adjustRam(ramAccounting); - } - - void adjustRam(long value) { - if (value != 0) { - long v = current.ramBytesUsed.addAndGet(value); - assert v >= 0 : "bytes=" + v; - } + current.put(uid, version); } void remove(BytesRef uid, DeleteVersionValue deleted) { - VersionValue previousValue = current.remove(uid); + current.remove(uid); current.updateMinDeletedTimestamp(deleted); - if (previousValue != null) { - long uidRAMBytesUsed = BASE_BYTES_PER_BYTESREF + uid.bytes.length; - adjustRam(-(BASE_BYTES_PER_CHM_ENTRY + previousValue.ramBytesUsed() + uidRAMBytesUsed)); - } if (old != VersionLookup.EMPTY) { // we also need to remove it from the old map here to make sure we don't read this stale value while // we are in the middle of a refresh. Most of the time the old map is an empty map so we can skip it there. @@ -452,7 +472,7 @@ synchronized void clear() { @Override public long ramBytesUsed() { - return maps.ramBytesUsed() + ramBytesUsedTombstones.get(); + return maps.ramBytesUsed() + ramBytesUsedTombstones.get() + ramBytesUsedForArchive(); } /** @@ -463,6 +483,13 @@ long ramBytesUsedForRefresh() { return maps.current.ramBytesUsed.get(); } + /** + * Returns how much RAM would be freed up by cleaning out the LiveVersionMapArchive. + */ + long ramBytesUsedForArchive() { + return archive.getMemoryBytesUsed(); + } + /** * Returns how much RAM is current being freed up by refreshing. This is the RAM usage of the previous version map that needs to stay * around until operations are safely recorded in the Lucene index. 
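The LiveVersionMap hunk above moves per-entry RAM accounting out of Maps and into VersionLookup itself, so that merge() can bulk-copy a whole lookup without double-counting keys that already exist in the target map. A minimal sketch of the pattern, with simplified types and a hypothetical entryBytes() cost function standing in for the real BASE_BYTES_PER_BYTESREF / BASE_BYTES_PER_CHM_ENTRY constants (this is not the actual Elasticsearch class):

    import java.util.HashMap;
    import java.util.Map;
    import java.util.concurrent.atomic.AtomicLong;

    // Sketch of the VersionLookup RAM-accounting pattern: every mutation adjusts
    // a shared counter by the byte delta of the entry it adds, replaces, or removes.
    final class VersionLookupSketch {
        private final Map<String, Long> map = new HashMap<>();
        private final AtomicLong ramBytesUsed = new AtomicLong();

        // Hypothetical per-entry cost: key chars plus a fixed per-entry overhead.
        private static long entryBytes(String key, long value) {
            return 2L * key.length() + 40;
        }

        Long put(String key, long value) {
            long delta = entryBytes(key, value);
            Long previous = map.put(key, value);
            if (previous != null) {
                delta -= entryBytes(key, previous); // the replaced entry returns its bytes
            }
            adjustRam(delta);
            return previous;
        }

        Long remove(String key) {
            Long previous = map.remove(key);
            if (previous != null) {
                adjustRam(-entryBytes(key, previous));
            }
            return previous;
        }

        void merge(VersionLookupSketch other) {
            // Subtract the cost of entries we are about to overwrite so the
            // bulk putAll below does not count them twice.
            long overwritten = 0;
            for (Map.Entry<String, Long> e : other.map.entrySet()) {
                Long existing = map.get(e.getKey());
                if (existing != null) {
                    overwritten += entryBytes(e.getKey(), existing);
                }
            }
            map.putAll(other.map);
            adjustRam(other.ramBytesUsed.get() - overwritten);
        }

        private void adjustRam(long delta) {
            if (delta != 0) {
                long v = ramBytesUsed.addAndGet(delta);
                assert v >= 0 : "bytes=" + v;
            }
        }

        long ramBytesUsed() {
            return ramBytesUsed.get();
        }
    }

Keeping the counter inside the lookup is also what makes the new ramBytesUsedForArchive() accessor workable: archived lookups carry their own byte counts, which is presumably what stateful LiveVersionMapArchive implementations sum and report via getMemoryBytesUsed().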
diff --git a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java index a68a1cea368d4..9ccbf6ac16fed 100644 --- a/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java +++ b/server/src/main/java/org/elasticsearch/index/engine/LiveVersionMapArchive.java @@ -39,6 +39,14 @@ default boolean isUnsafe() { return false; } + /** + * Returns how much memory is currently being used by the archive and would be freed up after + * unpromotables are refreshed. + */ + default long getMemoryBytesUsed() { + return 0L; + } + LiveVersionMapArchive NOOP_ARCHIVE = new LiveVersionMapArchive() { @Override public void afterRefresh(LiveVersionMap.VersionLookup old) {} diff --git a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java index fae3dd4069076..04ae0bb498841 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MatchNoneQueryBuilder.java @@ -39,14 +39,14 @@ public MatchNoneQueryBuilder(String rewriteReason) { */ public MatchNoneQueryBuilder(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { rewriteReason = in.readOptionalString(); } } @Override protected void doWriteTo(StreamOutput out) throws IOException { - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeOptionalString(rewriteReason); } } diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index b2067549fab67..5a2b01838e27b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -91,7 +91,7 @@ public final class SimpleQueryStringBuilder extends AbstractQueryBuilder= 0 : reservedSize; this.sizeInBytes = sizeInBytes; this.totalDataSetSizeInBytes = totalDataSetSizeInBytes; - this.reservedSize = reservedSize; + this.reservedSizeInBytes = reservedSize; } public void add(StoreStats stats) { @@ -72,7 +72,7 @@ public void add(StoreStats stats) { } sizeInBytes += stats.sizeInBytes; totalDataSetSizeInBytes += stats.totalDataSetSizeInBytes; - reservedSize = ignoreIfUnknown(reservedSize) + ignoreIfUnknown(stats.reservedSize); + reservedSizeInBytes = ignoreIfUnknown(reservedSizeInBytes) + ignoreIfUnknown(stats.reservedSizeInBytes); } private static long ignoreIfUnknown(long reservedSize) { @@ -83,28 +83,20 @@ public long sizeInBytes() { return sizeInBytes; } - public long getSizeInBytes() { - return sizeInBytes; - } - public ByteSizeValue size() { return ByteSizeValue.ofBytes(sizeInBytes); } - public ByteSizeValue getSize() { - return size(); + public long totalDataSetSizeInBytes() { + return totalDataSetSizeInBytes; } public ByteSizeValue totalDataSetSize() { return ByteSizeValue.ofBytes(totalDataSetSizeInBytes); } - public ByteSizeValue getTotalDataSetSize() { - return totalDataSetSize(); - } - - public long totalDataSetSizeInBytes() { - return totalDataSetSizeInBytes; + public long reservedSizeInBytes() { + return reservedSizeInBytes; } 
/** @@ -113,7 +105,7 @@ public long totalDataSetSizeInBytes() { * the reserved size is unknown. */ public ByteSizeValue getReservedSize() { - return ByteSizeValue.ofBytes(reservedSize); + return ByteSizeValue.ofBytes(reservedSizeInBytes); } @Override @@ -123,7 +115,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVLong(totalDataSetSizeInBytes); } if (out.getTransportVersion().onOrAfter(RESERVED_BYTES_VERSION)) { - out.writeZLong(reservedSize); + out.writeZLong(reservedSizeInBytes); } } @@ -144,12 +136,12 @@ public boolean equals(Object o) { StoreStats that = (StoreStats) o; return sizeInBytes == that.sizeInBytes && totalDataSetSizeInBytes == that.totalDataSetSizeInBytes - && reservedSize == that.reservedSize; + && reservedSizeInBytes == that.reservedSizeInBytes; } @Override public int hashCode() { - return Objects.hash(sizeInBytes, totalDataSetSizeInBytes, reservedSize); + return Objects.hash(sizeInBytes, totalDataSetSizeInBytes, reservedSizeInBytes); } static final class Fields { diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java index de7bfa91dda42..4042b21b865e4 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesRequestCache.java @@ -16,7 +16,6 @@ import org.elasticsearch.common.cache.Cache; import org.elasticsearch.common.cache.CacheBuilder; import org.elasticsearch.common.cache.CacheLoader; -import org.elasticsearch.common.cache.RemovalListener; import org.elasticsearch.common.cache.RemovalNotification; import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader; import org.elasticsearch.common.settings.Setting; @@ -48,7 +47,7 @@ * There are still several TODOs left in this class, some easily addressable, some more complex, but the support * is functional. */ -public final class IndicesRequestCache implements RemovalListener, Closeable { +public final class IndicesRequestCache implements Closeable { /** * A setting to enable or disable request caching on an index level. Its dynamic by default @@ -73,18 +72,14 @@ public final class IndicesRequestCache implements RemovalListener registeredClosedListeners = ConcurrentCollections.newConcurrentMap(); private final Set keysToClean = ConcurrentCollections.newConcurrentSet(); - private final ByteSizeValue size; - private final TimeValue expire; private final Cache cache; IndicesRequestCache(Settings settings) { - this.size = INDICES_CACHE_QUERY_SIZE.get(settings); - this.expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; - long sizeInBytes = size.getBytes(); + TimeValue expire = INDICES_CACHE_QUERY_EXPIRE.exists(settings) ? 
INDICES_CACHE_QUERY_EXPIRE.get(settings) : null; CacheBuilder cacheBuilder = CacheBuilder.builder() - .setMaximumWeight(sizeInBytes) + .setMaximumWeight(INDICES_CACHE_QUERY_SIZE.get(settings).getBytes()) .weigher((k, v) -> k.ramBytesUsed() + v.ramBytesUsed()) - .removalListener(this); + .removalListener(notification -> notification.getKey().entity.onRemoval(notification)); if (expire != null) { cacheBuilder.setExpireAfterAccess(expire); } @@ -101,11 +96,6 @@ void clear(CacheEntity entity) { cleanCache(); } - @Override - public void onRemoval(RemovalNotification notification) { - notification.getKey().entity.onRemoval(notification); - } - BytesReference getOrCompute( CacheEntity cacheEntity, CheckedSupplier loader, diff --git a/server/src/main/java/org/elasticsearch/indices/IndicesService.java b/server/src/main/java/org/elasticsearch/indices/IndicesService.java index dbbf2bb98212a..ebe4652230327 100644 --- a/server/src/main/java/org/elasticsearch/indices/IndicesService.java +++ b/server/src/main/java/org/elasticsearch/indices/IndicesService.java @@ -64,6 +64,7 @@ import org.elasticsearch.common.util.concurrent.EsThreadPoolExecutor; import org.elasticsearch.common.util.iterable.Iterables; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedFunction; @@ -138,14 +139,12 @@ import org.elasticsearch.search.query.QueryPhase; import org.elasticsearch.search.query.QuerySearchResult; import org.elasticsearch.threadpool.ThreadPool; -import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.Closeable; import java.io.IOException; -import java.io.InputStream; import java.io.UncheckedIOException; import java.nio.file.Files; import java.util.ArrayList; @@ -1654,8 +1653,7 @@ public AliasFilter buildAliasFilter(ClusterState state, String index, Set filterParser = bytes -> { try ( - InputStream inputStream = bytes.streamInput(); - XContentParser parser = XContentFactory.xContentType(inputStream).xContent().createParser(parserConfig, inputStream) + XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, bytes, XContentHelper.xContentType(bytes)) ) { return parseTopLevelQuery(parser); } diff --git a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java index 9e995c084a555..43e6c02ebe3c6 100644 --- a/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java +++ b/server/src/main/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerService.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.ReleasableLock; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.monitor.jvm.GcNames; import org.elasticsearch.monitor.jvm.JvmInfo; @@ -509,9 +510,12 @@ static OverLimitStrategy createOverLimitStrategy(boolean trackRealMemoryUsage) { if (trackRealMemoryUsage && jvmInfo.useG1GC().equals("true") // messing with GC is "dangerous" so we apply an escape hatch. Not intended to be used. 
&& Booleans.parseBoolean(System.getProperty("es.real_memory_circuit_breaker.g1_over_limit_strategy.enabled"), true)) { - TimeValue lockTimeout = TimeValue.timeValueMillis( - Integer.parseInt(System.getProperty("es.real_memory_circuit_breaker.g1_over_limit_strategy.lock_timeout_ms", "500")) + + long lockTimeoutInMillis = Integer.parseInt( + System.getProperty("es.real_memory_circuit_breaker.g1_over_limit_strategy.lock_timeout_ms", "500") ); + TimeValue lockTimeout = TimeValue.timeValueMillis(lockTimeoutInMillis); + TimeValue fullGCLockTimeout = TimeValue.timeValueMillis(lockTimeoutInMillis); // hardcode interval, do not want any tuning of it outside code changes. return new G1OverLimitStrategy( jvmInfo, @@ -519,7 +523,9 @@ static OverLimitStrategy createOverLimitStrategy(boolean trackRealMemoryUsage) { createYoungGcCountSupplier(), System::currentTimeMillis, 500, - lockTimeout + 5000, + lockTimeout, + fullGCLockTimeout ); } else { return memoryUsed -> memoryUsed; @@ -552,10 +558,18 @@ static class G1OverLimitStrategy implements OverLimitStrategy { private final LongSupplier gcCountSupplier; private final LongSupplier timeSupplier; private final TimeValue lockTimeout; + + // The lock acquisition timeout when we are running a full GC + private final TimeValue fullGCLockTimeout; private final long maxHeap; private long lastCheckTime = Long.MIN_VALUE; + private long lastFullGCTime = Long.MIN_VALUE; private final long minimumInterval; + private volatile boolean performingFullGC = false; + + // Minimum interval before triggering another full GC + private final long fullGCMinimumInterval; private long blackHole; private final ReleasableLock lock = new ReleasableLock(new ReentrantLock()); @@ -568,14 +582,18 @@ static class G1OverLimitStrategy implements OverLimitStrategy { LongSupplier gcCountSupplier, LongSupplier timeSupplier, long minimumInterval, - TimeValue lockTimeout + long fullGCMinimumInterval, + TimeValue lockTimeout, + TimeValue fullGCLockTimeout ) { this.lockTimeout = lockTimeout; + this.fullGCLockTimeout = fullGCLockTimeout; assert minimumInterval > 0; this.currentMemoryUsageSupplier = currentMemoryUsageSupplier; this.gcCountSupplier = gcCountSupplier; this.timeSupplier = timeSupplier; this.minimumInterval = minimumInterval; + this.fullGCMinimumInterval = fullGCMinimumInterval; this.maxHeap = jvmInfo.getMem().getHeapMax().getBytes(); long g1RegionSize = jvmInfo.getG1RegionSize(); if (g1RegionSize <= 0) { @@ -602,50 +620,23 @@ static long fallbackRegionSize(JvmInfo jvmInfo) { return regionSize; } + @SuppressForbidden(reason = "Prefer full GC to OOM or CBE") + private static void performFullGC() { + System.gc(); + } + @Override public MemoryUsage overLimit(MemoryUsage memoryUsed) { - boolean leader = false; - int allocationIndex = 0; - long allocationDuration = 0; - long begin = 0; + + TriggerGCResult result = TriggerGCResult.EMPTY; int attemptNoCopy = 0; + try (ReleasableLock locked = lock.tryAcquire(lockTimeout)) { if (locked != null) { attemptNoCopy = ++this.attemptNo; - begin = timeSupplier.getAsLong(); - leader = begin >= lastCheckTime + minimumInterval; - overLimitTriggered(leader); - if (leader) { - long initialCollectionCount = gcCountSupplier.getAsLong(); - logger.info("attempting to trigger G1GC due to high heap usage [{}]", memoryUsed.baseUsage); - long localBlackHole = 0; - // number of allocations, corresponding to (approximately) number of free regions + 1 - int allocationCount = Math.toIntExact((maxHeap - memoryUsed.baseUsage) / g1RegionSize + 1); - // allocations of 
half-region size becomes single humongous alloc, thus taking up a full region. - int allocationSize = (int) (g1RegionSize >> 1); - long maxUsageObserved = memoryUsed.baseUsage; - for (; allocationIndex < allocationCount; ++allocationIndex) { - long current = currentMemoryUsageSupplier.getAsLong(); - if (current >= maxUsageObserved) { - maxUsageObserved = current; - } else { - // we observed a memory drop, so some GC must have occurred - break; - } - if (initialCollectionCount != gcCountSupplier.getAsLong()) { - break; - } - localBlackHole += new byte[allocationSize].hashCode(); - } - - blackHole += localBlackHole; - logger.trace("black hole [{}]", blackHole); - - long now = timeSupplier.getAsLong(); - this.lastCheckTime = now; - allocationDuration = now - begin; - this.attemptNo = 0; - } + result = tryTriggerGC(memoryUsed); + } else { + logger.info("could not acquire lock within {} when attempting to trigger G1GC due to high heap usage", lockTimeout); } } catch (InterruptedException e) { logger.info("could not acquire lock when attempting to trigger G1GC due to high heap usage"); @@ -653,20 +644,45 @@ public MemoryUsage overLimit(MemoryUsage memoryUsed) { // fallthrough } + if (performingFullGC && attemptNoCopy == 0) { + // Another thread is currently performing a full GC, and we were not able to try (lock acquire timeout) + // Since the full GC thread may hold the lock for longer, try again for an additional timeout + logger.info( + "could not acquire lock within {} while another thread was performing a full GC, waiting again for {}", + lockTimeout, + fullGCLockTimeout + ); + try (ReleasableLock locked = lock.tryAcquire(fullGCLockTimeout)) { + if (locked != null) { + attemptNoCopy = ++this.attemptNo; + result = tryTriggerGC(memoryUsed); + } else { + logger.info( + "could not acquire lock within {} when attempting to trigger G1GC due to high heap usage", + fullGCLockTimeout + ); + } + } catch (InterruptedException e) { + logger.info("could not acquire lock when attempting to trigger G1GC due to high heap usage"); + Thread.currentThread().interrupt(); + // fallthrough + } + } + final long current = currentMemoryUsageSupplier.getAsLong(); if (current < memoryUsed.baseUsage) { - if (leader) { + if (result.gcAttempted()) { logger.info( "GC did bring memory usage down, before [{}], after [{}], allocations [{}], duration [{}]", memoryUsed.baseUsage, current, - allocationIndex, - allocationDuration + result.allocationIndex(), + result.allocationDuration() ); } else if (attemptNoCopy < 10 || Long.bitCount(attemptNoCopy) == 1) { logger.info( "memory usage down after [{}], before [{}], after [{}]", - begin - lastCheckTime, + result.timeSinceLastCheck(), memoryUsed.baseUsage, current ); @@ -678,18 +694,18 @@ public MemoryUsage overLimit(MemoryUsage memoryUsed) { memoryUsed.permanentChildUsage ); } else { - if (leader) { + if (result.gcAttempted()) { logger.info( "GC did not bring memory usage down, before [{}], after [{}], allocations [{}], duration [{}]", memoryUsed.baseUsage, current, - allocationIndex, - allocationDuration + result.allocationIndex(), + result.allocationDuration() ); } else if (attemptNoCopy < 10 || Long.bitCount(attemptNoCopy) == 1) { logger.info( "memory usage not down after [{}], before [{}], after [{}]", - begin - lastCheckTime, + result.timeSinceLastCheck(), memoryUsed.baseUsage, current ); @@ -699,6 +715,66 @@ public MemoryUsage overLimit(MemoryUsage memoryUsed) { } } + private TriggerGCResult tryTriggerGC(MemoryUsage memoryUsed) { + long begin = timeSupplier.getAsLong(); + 
boolean canPerformGC = begin >= lastCheckTime + minimumInterval; + int allocationIndex = 0; + + overLimitTriggered(canPerformGC); + + if (canPerformGC) { + long initialCollectionCount = gcCountSupplier.getAsLong(); + logger.info("attempting to trigger G1GC due to high heap usage [{}]", memoryUsed.baseUsage); + long localBlackHole = 0; + // number of allocations, corresponding to (approximately) number of free regions + 1 + int allocationCount = Math.toIntExact((maxHeap - memoryUsed.baseUsage) / g1RegionSize + 1); + // allocations of half-region size become a single humongous alloc, thus taking up a full region. + int allocationSize = (int) (g1RegionSize >> 1); + long maxUsageObserved = memoryUsed.baseUsage; + for (; allocationIndex < allocationCount; ++allocationIndex) { + long current = currentMemoryUsageSupplier.getAsLong(); + if (current >= maxUsageObserved) { + maxUsageObserved = current; + } else { + // we observed a memory drop, so some GC must have occurred + break; + } + if (initialCollectionCount != gcCountSupplier.getAsLong()) { + break; + } + localBlackHole += new byte[allocationSize].hashCode(); + } + + blackHole += localBlackHole; + logger.trace("black hole [{}]", blackHole); + + this.lastCheckTime = timeSupplier.getAsLong(); + this.attemptNo = 0; + } + + long reclaimedMemory = memoryUsed.baseUsage - currentMemoryUsageSupplier.getAsLong(); + // TODO: use a threshold? Relative to % of memory? + if (reclaimedMemory <= 0) { + long now = timeSupplier.getAsLong(); + boolean canPerformFullGC = now >= lastFullGCTime + fullGCMinimumInterval; + if (canPerformFullGC) { + // Enough time has passed since the last full GC fallback + performingFullGC = true; + logger.info("attempt to trigger young GC failed to bring memory down, triggering full GC"); + performFullGC(); + performingFullGC = false; + this.lastFullGCTime = timeSupplier.getAsLong(); + } + } + + long allocationDuration = timeSupplier.getAsLong() - begin; + return new TriggerGCResult(canPerformGC, allocationIndex, allocationDuration, begin - lastCheckTime); + } + + private record TriggerGCResult(boolean gcAttempted, int allocationIndex, long allocationDuration, long timeSinceLastCheck) { + private static final TriggerGCResult EMPTY = new TriggerGCResult(false, 0, 0, 0); + } + void overLimitTriggered(boolean leader) { // for tests to override.
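
The new two-stage strategy above is easier to see in isolation. Below is a minimal, self-contained sketch of the same idea -- provoke a G1 collection with half-region ("humongous") allocations, and fall back to an explicit full GC only when nothing was reclaimed and the minimum interval has elapsed. The class name, region size, and interval here are illustrative assumptions, not the production constants:

```java
// Illustrative sketch only: GcFallbackSketch, REGION_SIZE, and the interval are
// assumptions, not the production values from HierarchyCircuitBreakerService.
import java.lang.management.ManagementFactory;
import java.lang.management.MemoryMXBean;

public class GcFallbackSketch {
    private static final MemoryMXBean HEAP = ManagementFactory.getMemoryMXBean();
    private static final long REGION_SIZE = 4 * 1024 * 1024;   // assume 4 MiB G1 regions (-XX:G1HeapRegionSize)
    private static final long FULL_GC_MIN_INTERVAL_MS = 5_000; // mirrors the new 5000 ms minimum interval
    private static long lastFullGcTime = Long.MIN_VALUE;
    private static volatile long blackHole;                    // keeps the allocations observable

    private static long heapUsed() {
        return HEAP.getHeapMemoryUsage().getUsed();
    }

    // Stage 1: allocate half-region-sized ("humongous") arrays until observed usage
    // drops, which means some collection must have run. Assumes -Xmx is set so that
    // getMax() is well-defined.
    private static void provokeYoungGc(long baseUsage) {
        long maxHeap = HEAP.getHeapMemoryUsage().getMax();
        int allocationCount = (int) ((maxHeap - baseUsage) / REGION_SIZE + 1);
        long maxObserved = baseUsage;
        for (int i = 0; i < allocationCount; i++) {
            long current = heapUsed();
            if (current < maxObserved) {
                return; // memory dropped, so some GC must have occurred
            }
            maxObserved = current;
            blackHole += new byte[(int) (REGION_SIZE >> 1)].hashCode();
        }
    }

    // Stage 2: if the young-GC attempt reclaimed nothing, fall back to a full GC,
    // rate-limited so repeated over-limit events cannot trigger back-to-back full GCs.
    public static void overLimit() {
        long before = heapUsed();
        provokeYoungGc(before);
        long reclaimed = before - heapUsed();
        long now = System.currentTimeMillis();
        if (reclaimed <= 0 && now >= lastFullGcTime + FULL_GC_MIN_INTERVAL_MS) {
            System.gc();
            lastFullGcTime = System.currentTimeMillis();
        }
    }
}
```
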
} diff --git a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java index 1b2d8056ac437..31d947d548ccf 100644 --- a/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java +++ b/server/src/main/java/org/elasticsearch/ingest/IngestDocument.java @@ -19,6 +19,7 @@ import org.elasticsearch.index.mapper.SourceFieldMapper; import org.elasticsearch.index.mapper.VersionFieldMapper; import org.elasticsearch.script.CtxMap; +import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.TemplateScript; import java.time.ZoneOffset; @@ -189,18 +190,6 @@ public T getFieldValue(String path, Class clazz, boolean ignoreMissing) { return cast(path, context, clazz); } - /** - * Returns the value contained in the document with the provided templated path - * @param pathTemplate The path within the document in dot-notation - * @param clazz The expected class of the field value - * @return the value for the provided path if existing, null otherwise - * @throws IllegalArgumentException if the pathTemplate is null, empty, invalid, if the field doesn't exist, - * or if the field that is found at the provided path is not of the expected type. - */ - public T getFieldValue(TemplateScript.Factory pathTemplate, Class clazz) { - return getFieldValue(renderTemplate(pathTemplate), clazz); - } - /** * Returns the value contained in the document for the provided path as a byte array. * If the path value is a string, a base64 decode operation will happen. @@ -239,16 +228,6 @@ public byte[] getFieldValueAsBytes(String path, boolean ignoreMissing) { } } - /** - * Checks whether the document contains a value for the provided templated path - * @param fieldPathTemplate the template for the path within the document in dot-notation - * @return true if the document contains a value for the field, false otherwise - * @throws IllegalArgumentException if the path is null, empty or invalid - */ - public boolean hasField(TemplateScript.Factory fieldPathTemplate) { - return hasField(renderTemplate(fieldPathTemplate)); - } - /** * Checks whether the document contains a value for the provided path * @param path The path within the document in dot-notation @@ -329,15 +308,6 @@ public boolean hasField(String path, boolean failOutOfRange) { return false; } - /** - * Removes the field identified by the provided path. - * @param fieldPathTemplate Resolves to the path with dot-notation within the document - * @throws IllegalArgumentException if the path is null, empty, invalid or if the field doesn't exist. - */ - public void removeField(TemplateScript.Factory fieldPathTemplate) { - removeField(renderTemplate(fieldPathTemplate)); - } - /** * Removes the field identified by the provided path. * @param path the path of the field to be removed @@ -468,17 +438,13 @@ public void appendFieldValue(String path, Object value, boolean allowDuplicates) * the provided value will be added to the newly created list. * Supports multiple values too provided in forms of list, in that case all the values will be appended to the * existing (or newly created) list. 
- * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param path The path within the document in dot-notation * @param valueSource The value source that will produce the value or values to append to the existing ones * @param allowDuplicates When false, any values that already exist in the field will not be added * @throws IllegalArgumentException if the path is null, empty or invalid. */ - public void appendFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource, boolean allowDuplicates) { - appendFieldValue( - fieldPathTemplate.newInstance(templateModel).execute(), - valueSource.copyAndResolve(templateModel), - allowDuplicates - ); + public void appendFieldValue(String path, ValueSource valueSource, boolean allowDuplicates) { + appendFieldValue(path, valueSource.copyAndResolve(templateModel), allowDuplicates); } /** @@ -499,26 +465,26 @@ public void setFieldValue(String path, Object value) { * Sets the provided value to the provided path in the document. * Any non existing path element will be created. If the last element is a list, * the value will replace the existing list. - * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param path The path within the document in dot-notation * @param valueSource The value source that will produce the value to put in for the path key * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. */ - public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource) { - setFieldValue(fieldPathTemplate.newInstance(templateModel).execute(), valueSource.copyAndResolve(templateModel)); + public void setFieldValue(String path, ValueSource valueSource) { + setFieldValue(path, valueSource.copyAndResolve(templateModel)); } /** * Sets the provided value to the provided path in the document. * Any non existing path element will be created. If the last element is a list, * the value will replace the existing list. - * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param path The path within the document in dot-notation * @param valueSource The value source that will produce the value to put in for the path key * @param ignoreEmptyValue The flag to determine whether to exit quietly when the value produced by TemplatedValue is null or empty * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. */ - public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource valueSource, boolean ignoreEmptyValue) { + public void setFieldValue(String path, ValueSource valueSource, boolean ignoreEmptyValue) { Object value = valueSource.copyAndResolve(templateModel); if (ignoreEmptyValue && valueSource instanceof ValueSource.TemplatedValue) { if (value == null) { @@ -530,20 +496,20 @@ public void setFieldValue(TemplateScript.Factory fieldPathTemplate, ValueSource } } - setFieldValue(fieldPathTemplate.newInstance(templateModel).execute(), value); + setFieldValue(path, value); } /** * Sets the provided value to the provided path in the document. * Any non existing path element will be created. If the last element is a list, * the value will replace the existing list. 
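
These overloads now take a pre-rendered String path where they previously took a TemplateScript.Factory. A minimal sketch of the caller-side pattern the new signatures encourage -- render the template once, then reuse the literal path -- where SetProcessorSketch is a hypothetical caller, not a real Elasticsearch processor:

```java
import org.elasticsearch.ingest.IngestDocument;
import org.elasticsearch.script.TemplateScript;

class SetProcessorSketch {
    private final TemplateScript.Factory fieldTemplate; // e.g. compiled from "{{other_field}}"
    private final Object value;

    SetProcessorSketch(TemplateScript.Factory fieldTemplate, Object value) {
        this.fieldTemplate = fieldTemplate;
        this.value = value;
    }

    void execute(IngestDocument document) {
        // Render the field path once against the document's template model...
        String path = document.renderTemplate(fieldTemplate);
        // ...then call the String-path overloads as often as needed.
        document.setFieldValue(path, value, false /* ignoreEmptyValue */);
    }
}
```
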
- * @param fieldPathTemplate Resolves to the path with dot-notation within the document + * @param path The path within the document in dot-notation * @param value The value to put in for the path key * @param ignoreEmptyValue The flag to determine whether to exit quietly when the value produced by TemplatedValue is null or empty * @throws IllegalArgumentException if the path is null, empty, invalid or if the value cannot be set to the * item identified by the provided path. */ - public void setFieldValue(TemplateScript.Factory fieldPathTemplate, Object value, boolean ignoreEmptyValue) { + public void setFieldValue(String path, Object value, boolean ignoreEmptyValue) { if (ignoreEmptyValue) { if (value == null) { return; @@ -555,7 +521,7 @@ public void setFieldValue(TemplateScript.Factory fieldPathTemplate, Object value } } - setFieldValue(fieldPathTemplate.newInstance(templateModel).execute(), value); + setFieldValue(path, value); } private void setFieldValue(String path, Object value, boolean append, boolean allowDuplicates) { @@ -724,6 +690,21 @@ private static T cast(String path, Object object, Class clazz) { ); } + /** + * Renders a template into a string. This allows field access via both literal fields like {@code "foo.bar.baz"} and dynamic fields + * like {@code "{{other_field}}"} (that is, look up the value of the 'other_field' in the document and then use the resulting string as + * the field to operate on). + *
<p>
+ * See {@link ConfigurationUtils#compileTemplate(String, String, String, String, ScriptService)} and associated methods, which + * create these {@link TemplateScript.Factory} instances. + *
<p>
+ * Note: for clarity and efficiency reasons, it is advisable to invoke this method outside IngestDocument itself -- fields should be + * rendered by a caller (once), and then passed to an ingest document repeatedly. There are enough methods on IngestDocument that + * operate on String paths already, we don't want to mirror all of them with twin methods that accept a template. + * + * @param template the template or literal string to evaluate + * @return a literal string field path + */ public String renderTemplate(TemplateScript.Factory template) { return template.newInstance(templateModel).execute(); } diff --git a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java index e36f1a085fbde..a7d93ec7e7d80 100644 --- a/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java +++ b/server/src/main/java/org/elasticsearch/monitor/metrics/NodeMetrics.java @@ -62,7 +62,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { this.stats = new NodeStatsCache(TimeValue.timeValueMinutes(1)); metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.get.total", + "es.indices.get.total", "Total number of get operations", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getCount()) @@ -71,7 +71,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.get.time", + "es.indices.get.time", "Time in milliseconds spent performing get operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getGet().getTimeInMillis()) @@ -80,7 +80,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.search.fetch.total", + "es.indices.search.fetch.total", "Total number of fetch operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchCount()) @@ -89,7 +89,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.search.fetch.time", + "es.indices.search.fetch.time", "Time in milliseconds spent performing fetch operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getSearch().getTotal().getFetchTimeInMillis()) @@ -98,7 +98,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.merge.total", + "es.indices.merge.total", "Total number of merge operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotal()) @@ -107,7 +107,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.merge.time", + "es.indices.merge.time", "Time in milliseconds spent performing merge operations.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getMerge().getTotalTimeInMillis()) @@ -116,7 +116,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.operations", + "es.translog.operations.count", "Number of transaction log operations.", "operation", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().estimatedNumberOfOperations()) @@ -125,7 +125,7 @@ private void 
registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.size", + "es.translog.size", "Size, in bytes, of the transaction log.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getTranslogSizeInBytes()) @@ -134,7 +134,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.uncommitted_operations", + "es.translog.uncommitted_operations.count", "Number of uncommitted transaction log operations.", "operations", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedOperations()) @@ -143,7 +143,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.indices.translog.uncommitted_size", + "es.translog.uncommitted_operations.size", "Size, in bytes, of uncommitted transaction log operations.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getUncommittedSizeInBytes()) @@ -152,7 +152,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.indices.translog.earliest_last_modified_age", + "es.translog.earliest_last_modified.time", "Earliest last modified age for the transaction log.", "time", () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getTranslog().getEarliestLastModifiedAge()) @@ -161,7 +161,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.transport.rx_size", + "es.transport.rx.size", "Size, in bytes, of RX packets received by the node during internal cluster communication.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getRxSize().getBytes()) @@ -170,7 +170,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.transport.tx_size", + "es.transport.tx.size", "Size, in bytes, of TX packets sent by the node during internal cluster communication.", "bytes", () -> new LongWithAttributes(stats.getOrRefresh().getTransport().getTxSize().getBytes()) @@ -179,7 +179,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.young.used", + "es.jvm.mem.pools.young.size", "Memory, in bytes, used by the young generation heap.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.YOUNG)) @@ -188,7 +188,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.survivor.used", + "es.jvm.mem.pools.survivor.size", "Memory, in bytes, used by the survivor space.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.SURVIVOR)) @@ -197,7 +197,7 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongGauge( - "es.node.stats.jvm.mem.pools.old.used", + "es.jvm.mem.pools.old.size", "Memory, in bytes, used by the old generation heap.", "bytes", () -> new LongWithAttributes(bytesUsedByGCGen(stats.getOrRefresh().getJvm().getMem(), GcNames.OLD)) @@ -206,12 +206,193 @@ private void registerAsyncMetrics(MeterRegistry registry) { metrics.add( registry.registerLongAsyncCounter( - "es.node.stats.fs.io_stats.io_time.total", + 
"es.fs.io_stats.time.total", "The total time in millis spent performing I/O operations across all devices used by Elasticsearch.", "milliseconds", () -> new LongWithAttributes(stats.getOrRefresh().getFs().getIoStats().getTotalIOTimeMillis()) ) ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.docs.total", + "Total number of indexed documents", + "documents", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCount()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.docs.count", + "Current number of indexing documents", + "documents", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexCurrent()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.indexing.failed.total", + "Total number of failed indexing operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexFailedCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.deletion.docs.total", + "Total number of deleted documents", + "documents", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCount()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indices.deletion.docs.count", + "Current number of deleting documents", + "documents", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteCurrent()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.indexing.time", + "Total indices indexing time", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getIndexTime().millis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.deletion.time", + "Total indices deletion time", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getDeleteTime().millis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.throttle.time", + "Total indices throttle time", + "milliseconds", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getThrottleTime().millis()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indices.noop.total", + "Total number of noop shard operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndices().getIndexing().getTotal().getNoopUpdateCount()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating_operations.size", + "Total number of memory bytes consumed by coordinating operations", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating_operations.total", + "Total number of coordinating operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalCoordinatingOps()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.coordinating_operations.size", + "Current number of memory bytes consumed by coordinating operations", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.coordinating_operations.count", + 
"Current number of coordinating operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentCoordinatingOps()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.coordinating_operations.rejections.total", + "Total number of coordinating operations rejections", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCoordinatingRejections()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.primary_operations.size", + "Total number of memory bytes consumed by primary operations", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryBytes()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.primary_operations.total", + "Total number of primary operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getTotalPrimaryOps()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.primary_operations.size", + "Current number of memory bytes consumed by primary operations", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryBytes()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.primary_operations.count", + "Current number of primary operations", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getCurrentPrimaryOps()) + ) + ); + + metrics.add( + registry.registerLongAsyncCounter( + "es.indexing.primary_operations.rejections.total", + "Total number of primary operations rejections", + "operations", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getPrimaryRejections()) + ) + ); + + metrics.add( + registry.registerLongGauge( + "es.indexing.memory.limit.size", + "Current memory limit for primary and coordinating operations", + "bytes", + () -> new LongWithAttributes(stats.getOrRefresh().getIndexingPressureStats().getMemoryLimit()) + ) + ); + } /** @@ -238,6 +419,7 @@ private long bytesUsedByGCGen(JvmStats.Mem mem, String name) { */ private NodeStats getNodeStats() { CommonStatsFlags flags = new CommonStatsFlags( + CommonStatsFlags.Flag.Indexing, CommonStatsFlags.Flag.Get, CommonStatsFlags.Flag.Search, CommonStatsFlags.Flag.Merge, @@ -259,7 +441,7 @@ private NodeStats getNodeStats() { false, false, false, - false, + true, false ); } diff --git a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java index 7a0d8c941e50f..0de5657c0cb1a 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeConstruction.java +++ b/server/src/main/java/org/elasticsearch/node/NodeConstruction.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.ingest.ReservedPipelineAction; import org.elasticsearch.action.search.SearchExecutionStatsCollector; import org.elasticsearch.action.search.SearchPhaseController; +import org.elasticsearch.action.search.SearchTransportAPMMetrics; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.action.update.UpdateHelper; @@ -201,6 +202,7 @@ import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Optional; @@ -383,6 +385,7 @@ private Settings 
createEnvironment(Environment initialEnvironment, NodeServicePr ); logger.info("JVM home [{}], using bundled JDK [{}]", System.getProperty("java.home"), jvmInfo.getUsingBundledJdk()); logger.info("JVM arguments {}", Arrays.toString(jvmInfo.getInputArguments())); + logger.info("Default Locale [{}]", Locale.getDefault()); if (Build.current().isProductionRelease() == false) { logger.warn( "version [{}] is a pre-release version of Elasticsearch and is not suitable for production", @@ -870,6 +873,7 @@ record PluginServiceInstances( telemetryProvider.getTracer() ); final ResponseCollectorService responseCollectorService = new ResponseCollectorService(clusterService); + final SearchTransportAPMMetrics searchTransportAPMMetrics = new SearchTransportAPMMetrics(telemetryProvider.getMeterRegistry()); final SearchTransportService searchTransportService = new SearchTransportService( transportService, client, @@ -995,7 +999,15 @@ record PluginServiceInstances( modules.add( loadPluginShutdownService(clusterService), - loadDiagnosticServices(settings, discoveryModule.getCoordinator(), clusterService, transportService, featureService, threadPool) + loadDiagnosticServices( + settings, + discoveryModule.getCoordinator(), + clusterService, + transportService, + featureService, + threadPool, + telemetryProvider + ) ); RecoveryPlannerService recoveryPlannerService = getRecoveryPlannerService(threadPool, clusterService, repositoryService); @@ -1038,6 +1050,7 @@ record PluginServiceInstances( b.bind(MetadataCreateIndexService.class).toInstance(metadataCreateIndexService); b.bind(MetadataUpdateSettingsService.class).toInstance(metadataUpdateSettingsService); b.bind(SearchService.class).toInstance(searchService); + b.bind(SearchTransportAPMMetrics.class).toInstance(searchTransportAPMMetrics); b.bind(SearchTransportService.class).toInstance(searchTransportService); b.bind(SearchPhaseController.class).toInstance(new SearchPhaseController(searchService::aggReduceContextBuilder)); b.bind(Transport.class).toInstance(transport); @@ -1132,7 +1145,8 @@ private Module loadDiagnosticServices( ClusterService clusterService, TransportService transportService, FeatureService featureService, - ThreadPool threadPool + ThreadPool threadPool, + TelemetryProvider telemetryProvider ) { MasterHistoryService masterHistoryService = new MasterHistoryService(transportService, threadPool, clusterService); @@ -1156,7 +1170,13 @@ private Module loadDiagnosticServices( Stream.concat(serverHealthIndicatorServices, pluginHealthIndicatorServices).toList(), threadPool ); - HealthPeriodicLogger healthPeriodicLogger = HealthPeriodicLogger.create(settings, clusterService, client, healthService); + HealthPeriodicLogger healthPeriodicLogger = HealthPeriodicLogger.create( + settings, + clusterService, + client, + healthService, + telemetryProvider + ); HealthMetadataService healthMetadataService = HealthMetadataService.create(clusterService, featureService, settings); LocalHealthMonitor localHealthMonitor = LocalHealthMonitor.create( settings, diff --git a/server/src/main/java/org/elasticsearch/node/NodeService.java b/server/src/main/java/org/elasticsearch/node/NodeService.java index e2283ea9851d7..4b9e5dc83c538 100644 --- a/server/src/main/java/org/elasticsearch/node/NodeService.java +++ b/server/src/main/java/org/elasticsearch/node/NodeService.java @@ -62,6 +62,7 @@ public class NodeService implements Closeable { private final AggregationUsageService aggregationUsageService; private final Coordinator coordinator; private final RepositoriesService 
repositoriesService; + private final Map componentVersions; NodeService( Settings settings, @@ -100,6 +101,7 @@ public class NodeService implements Closeable { this.indexingPressure = indexingPressure; this.aggregationUsageService = aggregationUsageService; this.repositoriesService = repositoriesService; + this.componentVersions = findComponentVersions(pluginService); clusterService.addStateApplier(ingestService); } @@ -122,7 +124,7 @@ public NodeInfo info( Version.CURRENT.toString(), TransportVersion.current(), IndexVersion.current(), - findComponentVersions(), + componentVersions, Build.current(), transportService.getLocalNode(), settings ? settingsFilter.filter(this.settings) : null, @@ -140,7 +142,7 @@ public NodeInfo info( ); } - private Map findComponentVersions() { + private static Map findComponentVersions(PluginsService pluginService) { var versions = pluginService.loadServiceProviders(ComponentVersionNumber.class) .stream() .collect(Collectors.toUnmodifiableMap(ComponentVersionNumber::componentId, cvn -> cvn.versionNumber().id())); diff --git a/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java b/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java index 7dfb64c989ea2..5cf5f1b92e472 100644 --- a/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java +++ b/server/src/main/java/org/elasticsearch/plugins/ExtensionLoader.java @@ -8,9 +8,10 @@ package org.elasticsearch.plugins; -import java.util.Locale; +import org.elasticsearch.core.Strings; + +import java.util.Optional; import java.util.ServiceLoader; -import java.util.function.Supplier; /** * A utility for loading SPI extensions. @@ -20,8 +21,7 @@ public class ExtensionLoader { /** * Loads a single SPI extension. * - * There should be no more than one extension found. If no service providers - * are found, the supplied fallback is used. + * There should be no more than one extension found. * * Note: A ServiceLoader is needed rather than the service class because ServiceLoaders * must be loaded by a module with the {@code uses} declaration. Since this @@ -29,21 +29,22 @@ public class ExtensionLoader { * service classes it may load. Thus, the caller must load the ServiceLoader. * * @param loader a service loader instance to find the singleton extension in - * @param fallback a supplier for an instance if no extensions are found * @return an instance of the extension * @param the SPI extension type */ - public static T loadSingleton(ServiceLoader loader, Supplier fallback) { - var extensions = loader.stream().toList(); - if (extensions.size() > 1) { + public static Optional loadSingleton(ServiceLoader loader) { + var extensions = loader.iterator(); + if (extensions.hasNext() == false) { + return Optional.empty(); + } + var ext = extensions.next(); + if (extensions.hasNext()) { // It would be really nice to give the actual extension class here directly, but that would require passing it // in effectively twice in the call site, once to ServiceLoader, and then to this method directly as well. // It's annoying that ServiceLoader hangs onto the service class, but does not expose it. 
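
Since loadSingleton now returns an Optional instead of taking a fallback Supplier, the default moves to the call site. A small hypothetical usage sketch; MyExtension and DefaultImpl are illustrative stand-ins, not real Elasticsearch types:

```java
import org.elasticsearch.plugins.ExtensionLoader;

import java.util.ServiceLoader;

public interface MyExtension {
    final class DefaultImpl implements MyExtension {}

    // The caller constructs the ServiceLoader because the `uses MyExtension`
    // directive must be declared by the calling module, not by ExtensionLoader's.
    static MyExtension loadOrDefault() {
        return ExtensionLoader.loadSingleton(ServiceLoader.load(MyExtension.class))
            .orElseGet(DefaultImpl::new);
    }
}
```
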
It does at least + // print the service class from its toString, which is better than nothing - throw new IllegalStateException(String.format(Locale.ROOT, "More than one extension found for %s", loader)); - } else if (extensions.isEmpty()) { - return fallback.get(); + throw new IllegalStateException(Strings.format("More than one extension found for %s", loader)); } - return extensions.get(0).get(); + return Optional.of(ext); } } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index c45a048480383..fadb550f63fd6 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -177,9 +177,9 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp protected final ThreadPool threadPool; - public static final String STATELESS_SHARD_THREAD_NAME = "stateless_shard"; + public static final String STATELESS_SHARD_READ_THREAD_NAME = "stateless_shard_read"; public static final String STATELESS_TRANSLOG_THREAD_NAME = "stateless_translog"; - public static final String STATELESS_UPLOAD_THREAD_NAME = "stateless_upload"; + public static final String STATELESS_SHARD_WRITE_THREAD_NAME = "stateless_shard_write"; public static final String SNAPSHOT_PREFIX = "snap-"; @@ -1984,9 +1984,9 @@ protected void assertSnapshotOrGenericThread() { ThreadPool.Names.SNAPSHOT, ThreadPool.Names.SNAPSHOT_META, ThreadPool.Names.GENERIC, - STATELESS_SHARD_THREAD_NAME, + STATELESS_SHARD_READ_THREAD_NAME, STATELESS_TRANSLOG_THREAD_NAME, - STATELESS_UPLOAD_THREAD_NAME + STATELESS_SHARD_WRITE_THREAD_NAME ); } diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java index ca3ff799436c2..e7ea234eae310 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/ChecksumBlobStoreFormat.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.lucene.store.IndexOutputOutputStream; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParserUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.gateway.CorruptStateException; @@ -33,6 +34,7 @@ import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.FilterInputStream; @@ -144,15 +146,23 @@ public T deserialize(String repoName, NamedXContentRegistry namedXContentRegistr BytesReference bytesReference = Streams.readFully(wrappedStream); deserializeMetaBlobInputStream.verifyFooter(); try ( - XContentParser parser = XContentType.SMILE.xContent() - .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, bytesReference.streamInput()) + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(namedXContentRegistry) + .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + bytesReference,
+ XContentType.SMILE + ) ) { result = reader.apply(repoName, parser); XContentParserUtils.ensureExpectedToken(null, parser.nextToken(), parser); } catch (Exception e) { try ( - XContentParser parser = XContentType.SMILE.xContent() - .createParser(namedXContentRegistry, LoggingDeprecationHandler.INSTANCE, bytesReference.streamInput()) + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(namedXContentRegistry) + .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + bytesReference, + XContentType.SMILE + ) ) { result = fallbackReader.apply(repoName, parser); XContentParserUtils.ensureExpectedToken(null, parser.nextToken(), parser); diff --git a/server/src/main/java/org/elasticsearch/rest/RestRequest.java b/server/src/main/java/org/elasticsearch/rest/RestRequest.java index eac13e5ef87a6..09eb83d109e3e 100644 --- a/server/src/main/java/org/elasticsearch/rest/RestRequest.java +++ b/server/src/main/java/org/elasticsearch/rest/RestRequest.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.core.Booleans; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.Nullable; @@ -23,6 +24,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.http.HttpChannel; import org.elasticsearch.http.HttpRequest; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ParsedMediaType; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContent; @@ -31,7 +33,6 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.io.InputStream; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -46,7 +47,7 @@ import static org.elasticsearch.common.unit.ByteSizeValue.parseBytesSizeValue; import static org.elasticsearch.core.TimeValue.parseTimeValue; -public class RestRequest implements ToXContent.Params { +public class RestRequest implements ToXContent.Params, Traceable { public static final String RESPONSE_RESTRICTED = "responseRestricted"; // tchar pattern as defined by RFC7230 section 3.2.6 @@ -541,12 +542,7 @@ public final XContentParser contentOrSourceParamParser() throws IOException { public final void withContentOrSourceParamParserOrNull(CheckedConsumer withParser) throws IOException { if (hasContentOrSourceParam()) { Tuple tuple = contentOrSourceParam(); - BytesReference content = tuple.v2(); - XContentType xContentType = tuple.v1(); - try ( - InputStream stream = content.streamInput(); - XContentParser parser = xContentType.xContent().createParser(parserConfig, stream) - ) { + try (XContentParser parser = XContentHelper.createParserNotCompressed(parserConfig, tuple.v2(), tuple.v1())) { withParser.accept(parser); } } else { @@ -631,6 +627,11 @@ public void markResponseRestricted(String restriction) { consumedParams.add(RESPONSE_RESTRICTED); } + @Override + public String getSpanId() { + return "rest-" + getRequestId(); + } + public static class MediaTypeHeaderException extends RuntimeException { private final String message; diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCloneSnapshotAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCloneSnapshotAction.java index c6790e7de21e6..7785680a3ca8d 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCloneSnapshotAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestCloneSnapshotAction.java @@ -43,15 +43,17 @@ public String getName() { @Override public RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - final Map source = request.contentParser().map(); - final CloneSnapshotRequest cloneSnapshotRequest = new CloneSnapshotRequest( - request.param("repository"), - request.param("snapshot"), - request.param("target_snapshot"), - XContentMapValues.nodeStringArrayValue(source.getOrDefault("indices", Collections.emptyList())) - ); - cloneSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", cloneSnapshotRequest.masterNodeTimeout())); - cloneSnapshotRequest.indicesOptions(IndicesOptions.fromMap(source, cloneSnapshotRequest.indicesOptions())); - return channel -> client.admin().cluster().cloneSnapshot(cloneSnapshotRequest, new RestToXContentListener<>(channel)); + try (var parser = request.contentParser()) { + final Map source = parser.map(); + final CloneSnapshotRequest cloneSnapshotRequest = new CloneSnapshotRequest( + request.param("repository"), + request.param("snapshot"), + request.param("target_snapshot"), + XContentMapValues.nodeStringArrayValue(source.getOrDefault("indices", Collections.emptyList())) + ); + cloneSnapshotRequest.masterNodeTimeout(request.paramAsTime("master_timeout", cloneSnapshotRequest.masterNodeTimeout())); + cloneSnapshotRequest.indicesOptions(IndicesOptions.fromMap(source, cloneSnapshotRequest.indicesOptions())); + return channel -> client.admin().cluster().cloneSnapshot(cloneSnapshotRequest, new RestToXContentListener<>(channel)); + } } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java index 095abcd14d355..76df8af1889a7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestNodesHotThreadsAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsRequest; import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; +import org.elasticsearch.action.admin.cluster.node.hotthreads.TransportNodesHotThreadsAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.Strings; import org.elasticsearch.core.RestApiVersion; @@ -112,7 +113,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC nodesHotThreadsRequest.interval(TimeValue.parseTimeValue(request.param("interval"), nodesHotThreadsRequest.interval(), "interval")); nodesHotThreadsRequest.snapshots(request.paramAsInt("snapshots", nodesHotThreadsRequest.snapshots())); nodesHotThreadsRequest.timeout(request.param("timeout")); - return channel -> client.admin().cluster().nodesHotThreads(nodesHotThreadsRequest, new RestResponseListener<>(channel) { + return channel -> client.execute(TransportNodesHotThreadsAction.TYPE, nodesHotThreadsRequest, new RestResponseListener<>(channel) { @Override public RestResponse buildResponse(NodesHotThreadsResponse response) { return RestResponse.chunked(RestStatus.OK, fromTextChunks(TEXT_CONTENT_TYPE, response.getTextChunks(), null)); diff --git 
a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java index 8442507c36b1c..e9f9b9bf4327d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/cluster/RestPendingClusterTasksAction.java @@ -9,6 +9,7 @@ package org.elasticsearch.rest.action.admin.cluster; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; +import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; @@ -39,8 +40,10 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC PendingClusterTasksRequest pendingClusterTasksRequest = new PendingClusterTasksRequest(); pendingClusterTasksRequest.masterNodeTimeout(request.paramAsTime("master_timeout", pendingClusterTasksRequest.masterNodeTimeout())); pendingClusterTasksRequest.local(request.paramAsBoolean("local", pendingClusterTasksRequest.local())); - return channel -> client.admin() - .cluster() - .pendingClusterTasks(pendingClusterTasksRequest, new RestChunkedToXContentListener<>(channel)); + return channel -> client.execute( + TransportPendingClusterTasksAction.TYPE, + pendingClusterTasksRequest, + new RestChunkedToXContentListener<>(channel) + ); } } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComponentTemplateAction.java index 337283ebf1958..fd6f529d876a2 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComponentTemplateAction.java @@ -43,7 +43,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); putRequest.create(request.paramAsBoolean("create", false)); putRequest.cause(request.param("cause", "api")); - putRequest.componentTemplate(ComponentTemplate.parse(request.contentParser())); + try (var parser = request.contentParser()) { + putRequest.componentTemplate(ComponentTemplate.parse(parser)); + } return channel -> client.execute(PutComponentTemplateAction.INSTANCE, putRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java index afc291bc6dc26..937022f54dca3 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestPutComposableIndexTemplateAction.java @@ -43,7 +43,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC putRequest.masterNodeTimeout(request.paramAsTime("master_timeout", putRequest.masterNodeTimeout())); putRequest.create(request.paramAsBoolean("create", false)); putRequest.cause(request.param("cause", "api")); - 
putRequest.indexTemplate(ComposableIndexTemplate.parse(request.contentParser())); + try (var parser = request.contentParser()) { + putRequest.indexTemplate(ComposableIndexTemplate.parse(parser)); + } return channel -> client.execute(PutComposableIndexTemplateAction.INSTANCE, putRequest, new RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java index 420d7a8d70f58..e140628e9bc0d 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateIndexTemplateAction.java @@ -48,7 +48,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC PutComposableIndexTemplateAction.Request indexTemplateRequest = new PutComposableIndexTemplateAction.Request( "simulating_template" ); - indexTemplateRequest.indexTemplate(ComposableIndexTemplate.parse(request.contentParser())); + try (var parser = request.contentParser()) { + indexTemplateRequest.indexTemplate(ComposableIndexTemplate.parse(parser)); + } indexTemplateRequest.create(request.paramAsBoolean("create", false)); indexTemplateRequest.cause(request.param("cause", "api")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java index d458c309933a8..cb513f737f3d0 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestSimulateTemplateAction.java @@ -44,7 +44,9 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli PutComposableIndexTemplateAction.Request indexTemplateRequest = new PutComposableIndexTemplateAction.Request( "simulating_template" ); - indexTemplateRequest.indexTemplate(ComposableIndexTemplate.parse(request.contentParser())); + try (var parser = request.contentParser()) { + indexTemplateRequest.indexTemplate(ComposableIndexTemplate.parse(parser)); + } indexTemplateRequest.create(request.paramAsBoolean("create", false)); indexTemplateRequest.cause(request.param("cause", "api")); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java index 74eddca033398..779cb229ca48b 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestUpdateSettingsAction.java @@ -47,7 +47,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC updateSettingsRequest.masterNodeTimeout(request.paramAsTime("master_timeout", updateSettingsRequest.masterNodeTimeout())); updateSettingsRequest.indicesOptions(IndicesOptions.fromRequest(request, updateSettingsRequest.indicesOptions())); updateSettingsRequest.reopen(request.paramAsBoolean("reopen", false)); - updateSettingsRequest.fromXContent(request.contentParser()); + try (var parser = request.contentParser()) { + updateSettingsRequest.fromXContent(parser); + } return channel -> client.admin().indices().updateSettings(updateSettingsRequest, new 
RestToXContentListener<>(channel)); } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java index 5e9b2c8452579..068c809554631 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestAllocationAction.java @@ -130,7 +130,7 @@ private Table buildTable(RestRequest request, final ClusterStateResponse state, table.startRow(); table.addCell(shardCount); - table.addCell(nodeStats.getIndices().getStore().getSize()); + table.addCell(nodeStats.getIndices().getStore().size()); table.addCell(used < 0 ? null : ByteSizeValue.ofBytes(used)); table.addCell(avail.getBytes() < 0 ? null : avail); table.addCell(total.getBytes() < 0 ? null : total); diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java index b761c7e3ca054..4a238451bcc69 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestCatComponentTemplateAction.java @@ -127,24 +127,27 @@ private static int countMappingInTemplate(Template template) throws Exception { } int count = 0; XContentType xContentType = XContentType.JSON; - XContentParser parser = xContentType.xContent() - .createParser(XContentParserConfiguration.EMPTY, template.mappings().uncompressed().array()); - XContentParser.Token token = parser.nextToken(); - String currentFieldName = null; - while (token != XContentParser.Token.END_OBJECT) { - if (token == XContentParser.Token.FIELD_NAME) { - currentFieldName = parser.currentName(); - } else if (token == XContentParser.Token.START_OBJECT) { - if ("_doc".equals(currentFieldName)) { - List list = parser.mapOrdered().values().stream().toList(); - for (Object mapping : list) { - count = count + countSubAttributes(mapping); + try ( + XContentParser parser = xContentType.xContent() + .createParser(XContentParserConfiguration.EMPTY, template.mappings().uncompressed().array()) + ) { + XContentParser.Token token = parser.nextToken(); + String currentFieldName = null; + while (token != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if ("_doc".equals(currentFieldName)) { + List list = parser.mapOrdered().values().stream().toList(); + for (Object mapping : list) { + count = count + countSubAttributes(mapping); + } } + } else { + parser.skipChildren(); } - } else { - parser.skipChildren(); + token = parser.nextToken(); } - token = parser.nextToken(); } return count; } diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java index 7408bf3ab229e..19ebbd2f19df4 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestPendingClusterTasksAction.java @@ -10,6 +10,7 @@ import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import 
org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.cluster.service.PendingClusterTask; import org.elasticsearch.common.Table; @@ -46,15 +47,17 @@ public RestChannelConsumer doCatRequest(final RestRequest request, final NodeCli PendingClusterTasksRequest pendingClusterTasksRequest = new PendingClusterTasksRequest(); pendingClusterTasksRequest.masterNodeTimeout(request.paramAsTime("master_timeout", pendingClusterTasksRequest.masterNodeTimeout())); pendingClusterTasksRequest.local(request.paramAsBoolean("local", pendingClusterTasksRequest.local())); - return channel -> client.admin() - .cluster() - .pendingClusterTasks(pendingClusterTasksRequest, new RestResponseListener(channel) { + return channel -> client.execute( + TransportPendingClusterTasksAction.TYPE, + pendingClusterTasksRequest, + new RestResponseListener<>(channel) { @Override public RestResponse buildResponse(PendingClusterTasksResponse pendingClusterTasks) throws Exception { Table tab = buildTable(request, pendingClusterTasks); return RestTable.buildResponse(tab, channel); } - }); + } + ); } @Override diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java index 94fffd6582155..a57d45e07fd15 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestShardsAction.java @@ -293,8 +293,8 @@ Table buildTable(RestRequest request, ClusterStateResponse state, IndicesStatsRe } table.addCell(shard.state()); table.addCell(getOrNull(commonStats, CommonStats::getDocs, DocsStats::getCount)); - table.addCell(getOrNull(commonStats, CommonStats::getStore, StoreStats::getSize)); - table.addCell(getOrNull(commonStats, CommonStats::getStore, StoreStats::getTotalDataSetSize)); + table.addCell(getOrNull(commonStats, CommonStats::getStore, StoreStats::size)); + table.addCell(getOrNull(commonStats, CommonStats::getStore, StoreStats::totalDataSetSize)); if (shard.assignedToNode()) { String ip = state.getState().nodes().get(shard.currentNodeId()).getHostAddress(); String nodeId = shard.currentNodeId(); diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java index c232e1a30c553..a881b2497b26c 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestMultiSearchAction.java @@ -142,7 +142,7 @@ public static MultiSearchRequest parseRequest( searchRequest.source(new SearchSourceBuilder().parseXContent(parser, false, searchUsageHolder)); RestSearchAction.validateSearchRequest(restRequest, searchRequest); if (searchRequest.pointInTimeBuilder() != null) { - RestSearchAction.preparePointInTime(searchRequest, restRequest, namedWriteableRegistry); + RestSearchAction.preparePointInTime(searchRequest, restRequest); } else { searchRequest.setCcsMinimizeRoundtrips( restRequest.paramAsBoolean("ccs_minimize_roundtrips", searchRequest.isCcsMinimizeRoundtrips()) diff --git a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java index 41102a3568e30..711aec182525e 100644 --- 
a/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/search/RestSearchAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.search.SearchContextId; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.IndicesOptions; @@ -50,7 +49,6 @@ import java.util.function.IntConsumer; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.action.search.SearchRequest.DEFAULT_INDICES_OPTIONS; import static org.elasticsearch.core.TimeValue.parseTimeValue; import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.POST; @@ -220,7 +218,7 @@ public static void parseSearchRequest( validateSearchRequest(request, searchRequest); if (searchRequest.pointInTimeBuilder() != null) { - preparePointInTime(searchRequest, request, namedWriteableRegistry); + preparePointInTime(searchRequest, request); } else { searchRequest.setCcsMinimizeRoundtrips( request.paramAsBoolean("ccs_minimize_roundtrips", searchRequest.isCcsMinimizeRoundtrips()) @@ -373,44 +371,14 @@ static SuggestBuilder parseSuggestUrlParameters(RestRequest request) { return null; } - static void preparePointInTime(SearchRequest request, RestRequest restRequest, NamedWriteableRegistry namedWriteableRegistry) { + static void preparePointInTime(SearchRequest request, RestRequest restRequest) { assert request.pointInTimeBuilder() != null; ActionRequestValidationException validationException = null; - if (request.indices().length > 0) { - validationException = addValidationError( - "[indices] cannot be used with point in time. 
Do not specify any index with point in time.", - validationException - ); - } - if (request.indicesOptions().equals(DEFAULT_INDICES_OPTIONS) == false) { - validationException = addValidationError("[indicesOptions] cannot be used with point in time", validationException); - } - if (request.routing() != null) { - validationException = addValidationError("[routing] cannot be used with point in time", validationException); - } - if (request.preference() != null) { - validationException = addValidationError("[preference] cannot be used with point in time", validationException); - } if (restRequest.paramAsBoolean("ccs_minimize_roundtrips", false)) { validationException = addValidationError("[ccs_minimize_roundtrips] cannot be used with point in time", validationException); request.setCcsMinimizeRoundtrips(false); } ExceptionsHelper.reThrowIfNotNull(validationException); - - final IndicesOptions indicesOptions = request.indicesOptions(); - final IndicesOptions stricterIndicesOptions = IndicesOptions.fromOptions( - indicesOptions.ignoreUnavailable(), - indicesOptions.allowNoIndices(), - false, - false, - false, - true, - true, - indicesOptions.ignoreThrottled() - ); - request.indicesOptions(stricterIndicesOptions); - final SearchContextId searchContextId = request.pointInTimeBuilder().getSearchContextId(namedWriteableRegistry); - request.indices(searchContextId.getActualIndices()); } /** diff --git a/server/src/main/java/org/elasticsearch/search/SearchService.java b/server/src/main/java/org/elasticsearch/search/SearchService.java index 9e59bfda96d19..8a03c7e9f08ba 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchService.java +++ b/server/src/main/java/org/elasticsearch/search/SearchService.java @@ -115,7 +115,6 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.search.suggest.completion.CompletionSuggestion; import org.elasticsearch.tasks.TaskCancelledException; -import org.elasticsearch.telemetry.tracing.SpanId; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.threadpool.Scheduler; import org.elasticsearch.threadpool.Scheduler.Cancellable; @@ -493,7 +492,7 @@ public void executeDfsPhase(ShardSearchRequest request, SearchShardTask task, Ac private DfsSearchResult executeDfsPhase(ShardSearchRequest request, SearchShardTask task) throws IOException { ReaderContext readerContext = createOrGetReaderContext(request); try (@SuppressWarnings("unused") // withScope call is necessary to instrument search execution - Releasable scope = tracer.withScope(SpanId.forTask(task)); + Releasable scope = tracer.withScope(task); Releasable ignored = readerContext.markAsUsed(getKeepAlive(request)); SearchContext context = createContext(readerContext, request, task, ResultsType.DFS, false) ) { @@ -665,9 +664,8 @@ private static void runAsync( */ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchShardTask task) throws Exception { final ReaderContext readerContext = createOrGetReaderContext(request); - SpanId spanId = SpanId.forTask(task); try ( - Releasable scope = tracer.withScope(spanId); + Releasable scope = tracer.withScope(task); Releasable ignored = readerContext.markAsUsed(getKeepAlive(request)); SearchContext context = createContext(readerContext, request, task, ResultsType.QUERY, true) ) { @@ -680,7 +678,7 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh } afterQueryTime = executor.success(); } finally { - tracer.stopTrace(spanId); + tracer.stopTrace(task); } if 
(request.numberOfShards() == 1 && (request.source() == null || request.source().rankBuilder() == null)) { // we already have query results, but we can run fetch at the same time @@ -711,7 +709,7 @@ private SearchPhaseResult executeQueryPhase(ShardSearchRequest request, SearchSh private QueryFetchSearchResult executeFetchPhase(ReaderContext reader, SearchContext context, long afterQueryTime) { try ( - Releasable scope = tracer.withScope(SpanId.forTask(context.getTask())); + Releasable scope = tracer.withScope(context.getTask()); SearchOperationListenerExecutor executor = new SearchOperationListenerExecutor(context, true, afterQueryTime) ) { fetchPhase.execute(context, shortcutDocIdsToLoad(context)); diff --git a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java b/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java deleted file mode 100644 index 392f60ba36cd0..0000000000000 --- a/server/src/main/java/org/elasticsearch/search/internal/InternalSearchResponse.java +++ /dev/null @@ -1,80 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.search.internal; - -import org.elasticsearch.action.search.SearchResponseSections; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.profile.SearchProfileResults; -import org.elasticsearch.search.suggest.Suggest; - -import java.io.IOException; - -/** - * {@link SearchResponseSections} subclass that can be serialized over the wire. - */ -public class InternalSearchResponse extends SearchResponseSections implements Writeable { - public static final InternalSearchResponse EMPTY_WITH_TOTAL_HITS = new InternalSearchResponse( - SearchHits.EMPTY_WITH_TOTAL_HITS, - null, - null, - null, - false, - null, - 1 - ); - - public static final InternalSearchResponse EMPTY_WITHOUT_TOTAL_HITS = new InternalSearchResponse( - SearchHits.EMPTY_WITHOUT_TOTAL_HITS, - null, - null, - null, - false, - null, - 1 - ); - - public InternalSearchResponse( - SearchHits hits, - InternalAggregations aggregations, - Suggest suggest, - SearchProfileResults profileResults, - boolean timedOut, - Boolean terminatedEarly, - int numReducePhases - ) { - super(hits, aggregations, suggest, timedOut, terminatedEarly, profileResults, numReducePhases); - } - - public InternalSearchResponse(StreamInput in) throws IOException { - super( - new SearchHits(in), - in.readBoolean() ? InternalAggregations.readFrom(in) : null, - in.readBoolean() ? 
new Suggest(in) : null, - in.readBoolean(), - in.readOptionalBoolean(), - in.readOptionalWriteable(SearchProfileResults::new), - in.readVInt() - ); - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - hits.writeTo(out); - out.writeOptionalWriteable((InternalAggregations) aggregations); - out.writeOptionalWriteable(suggest); - out.writeBoolean(timedOut); - out.writeOptionalBoolean(terminatedEarly); - out.writeOptionalWriteable(profileResults); - out.writeVInt(numReducePhases); - } -} diff --git a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java index a58e7fa7d4a2b..761936b43053c 100644 --- a/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java +++ b/server/src/main/java/org/elasticsearch/search/lookup/SourceFilter.java @@ -94,12 +94,13 @@ private Function buildBytesFilter() { BytesStreamOutput streamOutput = new BytesStreamOutput(1024); XContent xContent = in.sourceContentType().xContent(); XContentBuilder builder = new XContentBuilder(xContent, streamOutput); - XContentParser parser = xContent.createParser(parserConfig, in.internalSourceRef().streamInput()); - if ((parser.currentToken() == null) && (parser.nextToken() == null)) { - return Source.empty(in.sourceContentType()); + try (XContentParser parser = xContent.createParser(parserConfig, in.internalSourceRef().streamInput())) { + if ((parser.currentToken() == null) && (parser.nextToken() == null)) { + return Source.empty(in.sourceContentType()); + } + builder.copyCurrentStructure(parser); + return Source.fromBytes(BytesReference.bytes(builder)); } - builder.copyCurrentStructure(parser); - return Source.fromBytes(BytesReference.bytes(builder)); } catch (IOException e) { throw new UncheckedIOException(e); } diff --git a/server/src/main/java/org/elasticsearch/tasks/Task.java b/server/src/main/java/org/elasticsearch/tasks/Task.java index 3726ba265e433..83ee08574df4e 100644 --- a/server/src/main/java/org/elasticsearch/tasks/Task.java +++ b/server/src/main/java/org/elasticsearch/tasks/Task.java @@ -11,6 +11,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.NamedWriteable; +import org.elasticsearch.telemetry.tracing.Traceable; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.ToXContentObject; @@ -21,7 +22,7 @@ /** * Current task information */ -public class Task { +public class Task implements Traceable { /** * The request header to mark tasks with specific ids @@ -265,4 +266,9 @@ public TaskResult result(DiscoveryNode node, ActionResponse response) throws IOE throw new IllegalStateException("response has to implement ToXContent to be able to store the results"); } } + + @Override + public String getSpanId() { + return "task-" + getId(); + } } diff --git a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java index e0ef4feb0ae35..377c7b3847b0b 100644 --- a/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java +++ b/server/src/main/java/org/elasticsearch/tasks/TaskInfo.java @@ -67,7 +67,7 @@ public static TaskInfo from(StreamInput in) throws IOException { return new TaskInfo( taskId, in.readString(), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_055) ? in.readString() : taskId.getNodeId(), + in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) ? 
in.readString() : taskId.getNodeId(), in.readString(), in.readOptionalString(), in.readOptionalNamedWriteable(Task.Status.class), @@ -84,7 +84,7 @@ public static TaskInfo from(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { taskId.writeTo(out); out.writeString(type); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_055)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeString(node); } out.writeString(action); diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java deleted file mode 100644 index 8a22102baadf9..0000000000000 --- a/server/src/main/java/org/elasticsearch/telemetry/tracing/SpanId.java +++ /dev/null @@ -1,56 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ - -package org.elasticsearch.telemetry.tracing; - -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.tasks.Task; - -import java.util.Objects; - -public class SpanId { - private final String rawId; - - private SpanId(String rawId) { - this.rawId = Objects.requireNonNull(rawId); - } - - public String getRawId() { - return rawId; - } - - @Override - public String toString() { - return "SpanId[" + rawId + "]"; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - SpanId spanId = (SpanId) o; - return rawId.equals(spanId.rawId); - } - - @Override - public int hashCode() { - return Objects.hash(rawId); - } - - public static SpanId forTask(Task task) { - return new SpanId("task-" + task.getId()); - } - - public static SpanId forRestRequest(RestRequest restRequest) { - return new SpanId("rest-" + restRequest.getRequestId()); - } - - public static SpanId forBareString(String rawId) { - return new SpanId(rawId); - } -} diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java new file mode 100644 index 0000000000000..64c8635d75dd8 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/telemetry/tracing/Traceable.java @@ -0,0 +1,19 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.telemetry.tracing; + +/** + * A class that can be traced using the telemetry tracing API + */ +public interface Traceable { + /** + * A consistent id for the span. 
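+ * For example, the {@code Task} change in this same commit implements it as:
+ * <pre>{@code
+ * public String getSpanId() {
+ *     return "task-" + getId();
+ * }
+ * }</pre>
+ *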
Should be structured "[short-name]-[unique-id]", i.e. "request-abc1234" */ + String getSpanId(); +} diff --git a/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java b/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java index f54857091b778..2c54c48641293 100644 --- a/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java +++ b/server/src/main/java/org/elasticsearch/telemetry/tracing/Tracer.java @@ -10,8 +10,6 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.core.Releasable; -import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.tasks.Task; import java.util.Map; @@ -38,25 +36,13 @@ public interface Tracer { /** * Called when a span starts. * @param threadContext the current context. Required for tracing parent/child span activity. - * @param spanId a unique identifier for the activity, and will not be sent to the tracing system. Add the ID - * to the attributes if it is important + * @param traceable provides a unique identifier for the activity, and will not be sent to the tracing system. Add the ID + * to the attributes if it is important * @param name the name of the span. Used to filter out spans, but also sent to the tracing system * @param attributes arbitrary key/value data for the span. Sent to the tracing system */ - void startTrace(ThreadContext threadContext, SpanId spanId, String name, Map<String, Object> attributes); - - /** - * @see Tracer#startTrace(ThreadContext, SpanId, String, Map) - */ - default void startTrace(ThreadContext threadContext, Task task, String name, Map<String, Object> attributes) { - startTrace(threadContext, SpanId.forTask(task), name, attributes); - } - - /** - * @see Tracer#startTrace(ThreadContext, SpanId, String, Map) - */ - default void startTrace(ThreadContext threadContext, RestRequest restRequest, String name, Map<String, Object> attributes) { - startTrace(threadContext, SpanId.forRestRequest(restRequest), name, attributes); - } + void startTrace(ThreadContext threadContext, Traceable traceable, String name, Map<String, Object> attributes); /** @@ -67,23 +53,9 @@ default void startTrace(ThreadContext threadContext, RestRequest restRequest, St /** * Called when a span ends. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span */ - void stopTrace(SpanId spanId); - - /** - * @see Tracer#stopTrace(SpanId) - */ - default void stopTrace(Task task) { - stopTrace(SpanId.forTask(task)); - } - - /** - * @see Tracer#stopTrace(SpanId) - */ - default void stopTrace(RestRequest restRequest) { - stopTrace(SpanId.forRestRequest(restRequest)); - } + void stopTrace(Traceable traceable); /** * Called when a span ends. This version of the method relies on context to select the span to stop. @@ -94,58 +66,51 @@ default void stopTrace(RestRequest restRequest) { * Some tracing implementations support the concept of "events" within a span, marking a point in time during the span * when something interesting happened. If the tracing implementation doesn't support events, then nothing will be recorded. * This should only be called when a trace has already been started on the {@code traceable}. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param eventName the event that happened. This should be something meaningful to people reviewing the data, for example * "send response", "finished processing", "validated request", etc.
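* A sketch of the call shape (hypothetical caller code, not part of this change; {@code task} may be any
* {@code Traceable}, such as an {@code org.elasticsearch.tasks.Task}):
* <pre>{@code
* tracer.addEvent(task, "validated request");
* }</pre>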
*/ - void addEvent(SpanId spanId, String eventName); + void addEvent(Traceable traceable, String eventName); /** * If an exception occurs during a span, you can add data about the exception to the span where the exception occurred. * This should only be called when a span has been started, otherwise it has no effect. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param throwable the exception that occurred. */ - void addError(SpanId spanId, Throwable throwable); - - /** - * @see Tracer#addError(SpanId, Throwable) - */ - default void addError(RestRequest restRequest, Throwable throwable) { - addError(SpanId.forRestRequest(restRequest), throwable); - } + void addError(Traceable traceable, Throwable throwable); /** * Adds a boolean attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, boolean value); + void setAttribute(Traceable traceable, String key, boolean value); /** * Adds a double attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, double value); + void setAttribute(Traceable traceable, String key, double value); /** * Adds a long attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, long value); + void setAttribute(Traceable traceable, String key, long value); /** * Adds a String attribute to an active span. These will be sent to the endpoint that collects tracing data. - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @param key the attribute key * @param value the attribute value */ - void setAttribute(SpanId spanId, String key, String value); + void setAttribute(Traceable traceable, String key, String value); /** * Usually you won't need to think about scopes when using tracing. However, @@ -172,10 +137,10 @@ default void addError(RestRequest restRequest, Throwable throwable) { *
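* (As a concrete sketch of the API shape, hypothetical caller code, not part of this change:
* <pre>{@code
* try (Releasable scope = tracer.withScope(task)) {
*     // work performed here is attributed to the span identified by task.getSpanId()
* }
* }</pre>
* The SearchService changes in this commit use exactly this try-with-resources pattern.)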

Nonetheless, it is possible to manually use scope where more detail is needed by * explicitly opening a scope via the `Tracer`. * - * @param spanId an identifier for the span + * @param traceable provides an identifier for the span * @return a scope. You MUST close it when you are finished with it. */ - Releasable withScope(SpanId spanId); + Releasable withScope(Traceable traceable); /** * A Tracer implementation that does nothing. This is used when no tracer is configured, @@ -183,52 +148,37 @@ */ Tracer NOOP = new Tracer() { @Override - public void startTrace(ThreadContext threadContext, SpanId spanId, String name, Map<String, Object> attributes) {} - - @Override - public void startTrace(ThreadContext threadContext, Task task, String name, Map<String, Object> attributes) {} - - @Override - public void startTrace(ThreadContext threadContext, RestRequest restRequest, String name, Map<String, Object> attributes) {} + public void startTrace(ThreadContext threadContext, Traceable traceable, String name, Map<String, Object> attributes) {} @Override public void startTrace(String name, Map<String, Object> attributes) {} @Override - public void stopTrace(SpanId spanId) {} - - @Override - public void stopTrace(Task task) {} - - @Override - public void stopTrace(RestRequest restRequest) {} + public void stopTrace(Traceable traceable) {} @Override public void stopTrace() {} @Override - public void addEvent(SpanId spanId, String eventName) {} - - @Override - public void addError(SpanId spanId, Throwable throwable) {} + public void addEvent(Traceable traceable, String eventName) {} @Override - public void addError(RestRequest restRequest, Throwable throwable) {} + public void addError(Traceable traceable, Throwable throwable) {} @Override - public void setAttribute(SpanId spanId, String key, boolean value) {} + public void setAttribute(Traceable traceable, String key, boolean value) {} @Override - public void setAttribute(SpanId spanId, String key, double value) {} + public void setAttribute(Traceable traceable, String key, double value) {} @Override - public void setAttribute(SpanId spanId, String key, long value) {} + public void setAttribute(Traceable traceable, String key, long value) {} @Override - public void setAttribute(SpanId spanId, String key, String value) {} + public void setAttribute(Traceable traceable, String key, String value) {} @Override - public Releasable withScope(SpanId spanId) { + public Releasable withScope(Traceable traceable) { return () -> {}; } }; diff --git a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java index 320b9cfdbf7e6..cfb6f872ce748 100644 --- a/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/ProxyConnectionStrategy.java @@ -179,7 +179,7 @@ public class ProxyConnectionStrategy extends RemoteConnectionStrategy { RemoteConnectionManager.wrapConnectionWithRemoteClusterInfo( newConnection, clusterAlias, - actualProfile.getTransportProfile() + connectionManager.getCredentialsManager() ), actualProfile.getHandshakeTimeout(), cn -> true, diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java index a055e4122257f..3c74e46851504 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java +++
b/server/src/main/java/org/elasticsearch/transport/RemoteClusterConnection.java @@ -57,15 +57,28 @@ final class RemoteClusterConnection implements Closeable { * @param settings the nodes settings object * @param clusterAlias the configured alias of the cluster to connect to * @param transportService the local nodes transport service - * @param credentialsProtected Whether the remote cluster is protected by a credentials, i.e. it has a credentials configured - * via secure setting. This means the remote cluster uses the new configurable access RCS model - * (as opposed to the basic model). + * @param credentialsManager object to look up remote cluster credentials by cluster alias. If a cluster is protected by a credential, + * i.e. it has a credential configured via secure setting. + * This means the remote cluster uses the advanced RCS model (as opposed to the basic model). */ - RemoteClusterConnection(Settings settings, String clusterAlias, TransportService transportService, boolean credentialsProtected) { + RemoteClusterConnection( + Settings settings, + String clusterAlias, + TransportService transportService, + RemoteClusterCredentialsManager credentialsManager + ) { this.transportService = transportService; this.clusterAlias = clusterAlias; - ConnectionProfile profile = RemoteConnectionStrategy.buildConnectionProfile(clusterAlias, settings, credentialsProtected); - this.remoteConnectionManager = new RemoteConnectionManager(clusterAlias, createConnectionManager(profile, transportService)); + ConnectionProfile profile = RemoteConnectionStrategy.buildConnectionProfile( + clusterAlias, + settings, + credentialsManager.hasCredentials(clusterAlias) + ); + this.remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + credentialsManager, + createConnectionManager(profile, transportService) + ); this.connectionStrategy = RemoteConnectionStrategy.buildStrategy(clusterAlias, transportService, remoteConnectionManager, settings); // we register the transport service here as a listener to make sure we notify handlers on disconnect etc. this.remoteConnectionManager.addListener(transportService); diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java new file mode 100644 index 0000000000000..064e868970ef5 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterCredentialsManager.java @@ -0,0 +1,52 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1.
+ */ + +package org.elasticsearch.transport; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; + +import java.util.Map; + +import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS; + +public class RemoteClusterCredentialsManager { + + private static final Logger logger = LogManager.getLogger(RemoteClusterCredentialsManager.class); + + private volatile Map<String, SecureString> clusterCredentials; + + public RemoteClusterCredentialsManager(Settings settings) { + updateClusterCredentials(settings); + } + + public final void updateClusterCredentials(Settings settings) { + clusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings); + logger.debug( + () -> Strings.format( + "Updated remote cluster credentials for clusters: [%s]", + Strings.collectionToCommaDelimitedString(clusterCredentials.keySet()) + ) + ); + } + + @Nullable + public SecureString resolveCredentials(String clusterAlias) { + return clusterCredentials.get(clusterAlias); + } + + public boolean hasCredentials(String clusterAlias) { + return clusterCredentials.containsKey(clusterAlias); + } + + public static final RemoteClusterCredentialsManager EMPTY = new RemoteClusterCredentialsManager(Settings.EMPTY); +} diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java index 814b17bac95ef..fd5c39ec5fb1f 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterPortSettings.java @@ -39,7 +39,7 @@ */ public class RemoteClusterPortSettings { - public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_500_059; + public static final TransportVersion TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY = TransportVersions.V_8_500_061; public static final String REMOTE_CLUSTER_PROFILE = "_remote_cluster"; public static final String REMOTE_CLUSTER_PREFIX = "remote_cluster."; diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java index c38f4b26c665f..6bfbb95cbcfe9 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteClusterService.java @@ -147,15 +147,14 @@ public boolean isRemoteClusterServerEnabled() { private final TransportService transportService; private final Map<String, RemoteClusterConnection> remoteClusters = ConcurrentCollections.newConcurrentMap(); - private final Set<String> credentialsProtectedRemoteClusters; + private final RemoteClusterCredentialsManager remoteClusterCredentialsManager; RemoteClusterService(Settings settings, TransportService transportService) { super(settings); this.enabled = DiscoveryNode.isRemoteClusterClient(settings); this.remoteClusterServerEnabled = REMOTE_CLUSTER_SERVER_ENABLED.get(settings); this.transportService = transportService; - this.credentialsProtectedRemoteClusters = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings).keySet(); - + this.remoteClusterCredentialsManager = new RemoteClusterCredentialsManager(settings); if (remoteClusterServerEnabled) { registerRemoteClusterHandshakeRequestHandler(transportService); } @@ -305,6
+304,14 @@ private synchronized void updateSkipUnavailable(String clusterAlias, Boolean ski } } + public void updateRemoteClusterCredentials(Settings settings) { + remoteClusterCredentialsManager.updateClusterCredentials(settings); + } + + public RemoteClusterCredentialsManager getRemoteClusterCredentialsManager() { + return remoteClusterCredentialsManager; + } + @Override protected void updateRemoteCluster(String clusterAlias, Settings settings) { CountDownLatch latch = new CountDownLatch(1); @@ -363,12 +370,7 @@ synchronized void updateRemoteCluster( if (remote == null) { // this is a new cluster we have to add a new representation Settings finalSettings = Settings.builder().put(this.settings, false).put(newSettings, false).build(); - remote = new RemoteClusterConnection( - finalSettings, - clusterAlias, - transportService, - credentialsProtectedRemoteClusters.contains(clusterAlias) - ); + remote = new RemoteClusterConnection(finalSettings, clusterAlias, transportService, remoteClusterCredentialsManager); remoteClusters.put(clusterAlias, remote); remote.ensureConnected(listener.map(ignored -> RemoteClusterConnectionStatus.CONNECTED)); } else if (remote.shouldRebuildConnection(newSettings)) { @@ -380,12 +382,7 @@ synchronized void updateRemoteCluster( } remoteClusters.remove(clusterAlias); Settings finalSettings = Settings.builder().put(this.settings, false).put(newSettings, false).build(); - remote = new RemoteClusterConnection( - finalSettings, - clusterAlias, - transportService, - credentialsProtectedRemoteClusters.contains(clusterAlias) - ); + remote = new RemoteClusterConnection(finalSettings, clusterAlias, transportService, remoteClusterCredentialsManager); remoteClusters.put(clusterAlias, remote); remote.ensureConnected(listener.map(ignored -> RemoteClusterConnectionStatus.RECONNECTED)); } else { diff --git a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java index b16734b273376..3b531d54fb033 100644 --- a/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java +++ b/server/src/main/java/org/elasticsearch/transport/RemoteConnectionManager.java @@ -12,6 +12,7 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; @@ -25,18 +26,19 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicLong; -import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE; import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME; public class RemoteConnectionManager implements ConnectionManager { private final String clusterAlias; + private final RemoteClusterCredentialsManager credentialsManager; private final ConnectionManager delegate; private final AtomicLong counter = new AtomicLong(); private volatile List<DiscoveryNode> connectedNodes = Collections.emptyList(); - RemoteConnectionManager(String clusterAlias, ConnectionManager delegate) { + RemoteConnectionManager(String clusterAlias, RemoteClusterCredentialsManager credentialsManager, ConnectionManager delegate) { this.clusterAlias = clusterAlias; + this.credentialsManager = credentialsManager; this.delegate = delegate; this.delegate.addListener(new
TransportConnectionListener() { @Override @@ -51,6 +53,10 @@ public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connecti }); } + public RemoteClusterCredentialsManager getCredentialsManager() { + return credentialsManager; + } + /** * Remote cluster connections have a different lifecycle from intra-cluster connections. Use {@link #connectToRemoteClusterNode} * instead of this method. @@ -95,13 +101,7 @@ public void openConnection(DiscoveryNode node, @Nullable ConnectionProfile profi node, profile, listener.delegateFailureAndWrap( - (l, connection) -> l.onResponse( - new InternalRemoteConnection( - connection, - clusterAlias, - profile != null ? profile.getTransportProfile() : getConnectionProfile().getTransportProfile() - ) - ) + (l, connection) -> l.onResponse(wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager)) ) ); } @@ -182,16 +182,35 @@ public void closeNoBlock() { * @return a cluster alias if the connection targets a node in the remote cluster, otherwise an empty result */ public static Optional<String> resolveRemoteClusterAlias(Transport.Connection connection) { + return resolveRemoteClusterAliasWithCredentials(connection).map(RemoteClusterAliasWithCredentials::clusterAlias); + } + + public record RemoteClusterAliasWithCredentials(String clusterAlias, @Nullable SecureString credentials) { + @Override + public String toString() { + return "RemoteClusterAliasWithCredentials{clusterAlias='" + clusterAlias + "', credentials='::es_redacted::'}"; + } + } + + /** + * This method returns information (alias and credentials) about the remote cluster for the given transport connection. + * Either or both of alias and credentials can be null depending on the connection. + * + * @param connection the transport connection for which to resolve a remote cluster alias + */ + public static Optional<RemoteClusterAliasWithCredentials> resolveRemoteClusterAliasWithCredentials(Transport.Connection connection) { Transport.Connection unwrapped = TransportService.unwrapConnection(connection); if (unwrapped instanceof InternalRemoteConnection remoteConnection) { - return Optional.of(remoteConnection.getClusterAlias()); + return Optional.of( + new RemoteClusterAliasWithCredentials(remoteConnection.getClusterAlias(), remoteConnection.getClusterCredentials()) + ); } return Optional.empty(); } private Transport.Connection getConnectionInternal(DiscoveryNode node) throws NodeNotConnectedException { Transport.Connection connection = delegate.getConnection(node); - return new InternalRemoteConnection(connection, clusterAlias, getConnectionProfile().getTransportProfile()); + return wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, credentialsManager); } private synchronized void addConnectedNode(DiscoveryNode addedNode) { @@ -297,21 +316,27 @@ private static final class InternalRemoteConnection implements Transport.Connect private static final Logger logger = LogManager.getLogger(InternalRemoteConnection.class); private final Transport.Connection connection; private final String clusterAlias; - private final boolean isRemoteClusterProfile; + @Nullable + private final SecureString clusterCredentials; - InternalRemoteConnection(Transport.Connection connection, String clusterAlias, String transportProfile) { + private InternalRemoteConnection(Transport.Connection connection, String clusterAlias, @Nullable SecureString clusterCredentials) { assert false == connection instanceof InternalRemoteConnection : "should not double wrap"; assert false == connection instanceof ProxyConnection : "proxy connection
should wrap internal remote connection, not the other way around"; - this.clusterAlias = Objects.requireNonNull(clusterAlias); this.connection = Objects.requireNonNull(connection); - this.isRemoteClusterProfile = REMOTE_CLUSTER_PROFILE.equals(Objects.requireNonNull(transportProfile)); + this.clusterAlias = Objects.requireNonNull(clusterAlias); + this.clusterCredentials = clusterCredentials; } public String getClusterAlias() { return clusterAlias; } + @Nullable + public SecureString getClusterCredentials() { + return clusterCredentials; + } + @Override public DiscoveryNode getNode() { return connection.getNode(); @@ -321,7 +346,7 @@ public DiscoveryNode getNode() { public void sendRequest(long requestId, String action, TransportRequest request, TransportRequestOptions options) throws IOException, TransportException { final String effectiveAction; - if (isRemoteClusterProfile && TransportService.HANDSHAKE_ACTION_NAME.equals(action)) { + if (clusterCredentials != null && TransportService.HANDSHAKE_ACTION_NAME.equals(action)) { logger.trace("sending remote cluster specific handshake to node [{}] of remote cluster [{}]", getNode(), clusterAlias); effectiveAction = REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME; } else { @@ -389,8 +414,8 @@ public boolean hasReferences() { static InternalRemoteConnection wrapConnectionWithRemoteClusterInfo( Transport.Connection connection, String clusterAlias, - String transportProfile + RemoteClusterCredentialsManager credentialsManager ) { - return new InternalRemoteConnection(connection, clusterAlias, transportProfile); + return new InternalRemoteConnection(connection, clusterAlias, credentialsManager.resolveCredentials(clusterAlias)); } } diff --git a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java index 0dcad9cf6864c..0f68a58faf463 100644 --- a/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java +++ b/server/src/main/java/org/elasticsearch/transport/SniffConnectionStrategy.java @@ -357,7 +357,11 @@ private ConnectionManager.ConnectionValidator getConnectionValidator(DiscoveryNo : "transport profile must be consistent between the connection manager and the actual profile"; transportService.connectionValidator(node) .validate( - RemoteConnectionManager.wrapConnectionWithRemoteClusterInfo(connection, clusterAlias, profile.getTransportProfile()), + RemoteConnectionManager.wrapConnectionWithRemoteClusterInfo( + connection, + clusterAlias, + connectionManager.getCredentialsManager() + ), profile, listener ); diff --git a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat index 119a708832948..bdb1b75be4843 100644 --- a/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat +++ b/server/src/main/resources/META-INF/services/org.apache.lucene.codecs.PostingsFormat @@ -1,2 +1,3 @@ org.elasticsearch.index.codec.bloomfilter.ES85BloomFilterPostingsFormat org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat +org.elasticsearch.index.codec.postings.ES812PostingsFormat diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java index 7b452beac0938..a063c590a8c07 100644 --- 
a/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/allocation/DesiredBalanceResponseTests.java @@ -179,9 +179,14 @@ public void testToXContent() throws IOException { randomClusterInfo() ); - Map json = createParser( - ChunkedToXContent.wrapAsToXContent(response).toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) - ).map(); + Map json; + try ( + var parser = createParser( + ChunkedToXContent.wrapAsToXContent(response).toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS) + ) + ) { + json = parser.map(); + } assertThat(json.keySet(), containsInAnyOrder("stats", "cluster_balance_stats", "routing_table", "cluster_info")); // stats diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java index a1d2ef33d85f3..774093834e941 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/settings/ClusterUpdateSettingsRequestTests.java @@ -61,10 +61,11 @@ private void doFromXContentTestWithRandomFields(boolean addRandomFields) throws () -> randomAlphaOfLengthBetween(3, 10) ) ); - XContentParseException iae = expectThrows( - XContentParseException.class, - () -> ClusterUpdateSettingsRequest.fromXContent(createParser(xContentType.xContent(), mutated)) - ); + XContentParseException iae = expectThrows(XContentParseException.class, () -> { + try (var parser = createParser(xContentType.xContent(), mutated)) { + ClusterUpdateSettingsRequest.fromXContent(parser); + } + }); assertThat(iae.getMessage(), containsString("[cluster_update_settings_request] unknown field [" + unsupportedField + "]")); } else { try (XContentParser parser = createParser(xContentType.xContent(), originalBytes)) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java index 2f151e516cde4..97a5775f7c69f 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java @@ -99,15 +99,18 @@ public void testToXContent() throws IOException { } XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), new MapParams(Collections.emptyMap())); - XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); - Map map = parser.mapOrdered(); - CreateSnapshotRequest processed = new CreateSnapshotRequest((String) map.get("repository"), (String) map.get("snapshot")); - processed.waitForCompletion(original.waitForCompletion()); - processed.masterNodeTimeout(original.masterNodeTimeout()); - processed.source(map); - - assertEquals(original, processed); + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()) + ) { + Map map = parser.mapOrdered(); + CreateSnapshotRequest processed = new CreateSnapshotRequest((String) map.get("repository"), (String) 
map.get("snapshot")); + processed.waitForCompletion(original.waitForCompletion()); + processed.masterNodeTimeout(original.masterNodeTimeout()); + processed.source(map); + + assertEquals(original, processed); + } } public void testSizeCheck() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java index 922e7e03c7600..56216d2670150 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/restore/RestoreSnapshotRequestTests.java @@ -130,9 +130,13 @@ public void testSource() throws IOException { original.snapshotUuid(null); // cannot be set via the REST API original.quiet(false); // cannot be set via the REST API XContentBuilder builder = original.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap())); - XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); - Map map = parser.mapOrdered(); + Map map; + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()) + ) { + map = parser.mapOrdered(); + } // we will only restore properties from the map that are contained in the request body. All other // properties are restored from the original (in the actual REST action this is restored from the @@ -174,8 +178,11 @@ public void testToStringWillIncludeSkipOperatorOnlyState() { private Map convertRequestToMap(RestoreSnapshotRequest request) throws IOException { XContentBuilder builder = request.toXContent(XContentFactory.jsonBuilder(), new ToXContent.MapParams(Collections.emptyMap())); - XContentParser parser = XContentType.JSON.xContent() - .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()); - return parser.mapOrdered(); + try ( + XContentParser parser = XContentType.JSON.xContent() + .createParser(NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput()) + ) { + return parser.mapOrdered(); + } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java index 6c79946cce15f..d6cf90034f5b5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/diskusage/IndexDiskUsageAnalyzerTests.java @@ -14,7 +14,6 @@ import org.apache.lucene.codecs.lucene90.Lucene90DocValuesFormat; import org.apache.lucene.codecs.lucene99.Lucene99Codec; import org.apache.lucene.codecs.lucene99.Lucene99HnswVectorsFormat; -import org.apache.lucene.codecs.lucene99.Lucene99PostingsFormat; import org.apache.lucene.codecs.perfield.PerFieldDocValuesFormat; import org.apache.lucene.codecs.perfield.PerFieldKnnVectorsFormat; import org.apache.lucene.codecs.perfield.PerFieldPostingsFormat; @@ -67,6 +66,7 @@ import org.apache.lucene.util.FixedBitSet; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; import org.elasticsearch.index.shard.ShardId; 
import org.elasticsearch.index.store.LuceneFilesExtensions; import org.elasticsearch.test.ESTestCase; @@ -642,7 +642,7 @@ static void rewriteIndexWithPerFieldCodec(Directory source, CodecMode mode, Dire .setCodec(new Lucene99Codec(mode.mode()) { @Override public PostingsFormat getPostingsFormatForField(String field) { - return new Lucene99PostingsFormat(); + return new ES812PostingsFormat(); } @Override diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java index 8b700ecb9fc01..1290729252d0d 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverRequestTests.java @@ -66,7 +66,9 @@ public void testConditionsParsing() throws Exception { .field("min_primary_shard_docs", 10) .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); + try (var parser = createParser(builder)) { + request.fromXContent(false, parser); + } Map> conditions = request.getConditions().getConditions(); assertThat(conditions.size(), equalTo(10)); MaxAgeCondition maxAgeCondition = (MaxAgeCondition) conditions.get(MaxAgeCondition.NAME); @@ -118,7 +120,9 @@ public void testParsingWithIndexSettings() throws Exception { .endObject() .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); + try (var parser = createParser(builder)) { + request.fromXContent(false, parser); + } Map> conditions = request.getConditions().getConditions(); assertThat(conditions.size(), equalTo(3)); assertThat(request.getCreateIndexRequest().mappings(), containsString("not_analyzed")); @@ -139,8 +143,9 @@ public void testTypelessMappingParsing() throws Exception { .endObject() .endObject(); - request.fromXContent(false, createParser(builder)); - + try (var parser = createParser(builder)) { + request.fromXContent(false, parser); + } CreateIndexRequest createIndexRequest = request.getCreateIndexRequest(); String mapping = createIndexRequest.mappings(); assertNotNull(mapping); @@ -198,7 +203,11 @@ public void testUnknownFields() throws IOException { } builder.endObject(); BytesReference mutated = XContentTestUtils.insertRandomFields(xContentType, BytesReference.bytes(builder), null, random()); - expectThrows(XContentParseException.class, () -> request.fromXContent(false, createParser(xContentType.xContent(), mutated))); + expectThrows(XContentParseException.class, () -> { + try (var parser = createParser(xContentType.xContent(), mutated)) { + request.fromXContent(false, parser); + } + }); } public void testValidation() { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java index dfafcd0662290..f26cdfe2bea6c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/shards/TransportIndicesShardStoresActionTests.java @@ -67,9 +67,8 @@ void runTest() { request, future ); - assertTrue(future.isDone()); - final var response = future.actionGet(0L); + final var response = future.result(); assertThat(response.getFailures(), empty()); assertThat(response.getStoreStatuses(), anEmptyMap()); assertThat(shardsWithFailures, 
empty()); @@ -132,8 +131,7 @@ void runTest() { listExpected = false; assertFalse(future.isDone()); deterministicTaskQueue.runAllTasks(); - assertTrue(future.isDone()); - expectThrows(TaskCancelledException.class, () -> future.actionGet(0L)); + expectThrows(TaskCancelledException.class, future::result); } }); } @@ -153,9 +151,8 @@ void runTest() { assertFalse(future.isDone()); failOneRequest = true; deterministicTaskQueue.runAllTasks(); - assertTrue(future.isDone()); assertFalse(failOneRequest); - assertEquals("simulated", expectThrows(ElasticsearchException.class, () -> future.actionGet(0L)).getMessage()); + assertEquals("simulated", expectThrows(ElasticsearchException.class, future::result).getMessage()); } }); } diff --git a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java index 3af2639538f0d..ae25a5b597ec3 100644 --- a/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequestTests.java @@ -152,15 +152,15 @@ public void testToXContent() throws IOException { } public void testFromXContent() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"fields\" : [\"FOO\"] }"); - FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); - ObjectParser PARSER = new ObjectParser<>("field_caps_request"); - PARSER.declareStringArray(fromList(String.class, FieldCapabilitiesRequest::fields), new ParseField("fields")); - - PARSER.parse(parser, request, null); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, "{ \"fields\" : [\"FOO\"] }")) { + FieldCapabilitiesRequest request = new FieldCapabilitiesRequest(); + ObjectParser PARSER = new ObjectParser<>("field_caps_request"); + PARSER.declareStringArray(fromList(String.class, FieldCapabilitiesRequest::fields), new ParseField("fields")); - assertArrayEquals(request.fields(), new String[] { "FOO" }); + PARSER.parse(parser, request, null); + assertArrayEquals(request.fields(), new String[] { "FOO" }); + } } public void testValidation() { diff --git a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java index eb9cfa4a6939c..76fdef3d06c1f 100644 --- a/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/get/MultiGetRequestTests.java @@ -80,24 +80,25 @@ public void testUnexpectedField() throws IOException { } public void testAddWithValidSourceValueIsAccepted() throws Exception { - XContentParser parser = createParser( - XContentFactory.jsonBuilder() - .startObject() - .startArray("docs") - .startObject() - .field("_source", randomFrom("false", "true")) - .endObject() - .startObject() - .field("_source", randomBoolean()) - .endObject() - .endArray() - .endObject() - ); - - MultiGetRequest multiGetRequest = new MultiGetRequest(); - multiGetRequest.add(randomAlphaOfLength(5), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); - - assertEquals(2, multiGetRequest.getItems().size()); + try ( + XContentParser parser = createParser( + XContentFactory.jsonBuilder() + .startObject() + .startArray("docs") + .startObject() + .field("_source", randomFrom("false", "true")) + .endObject() + .startObject() + .field("_source", randomBoolean()) + .endObject() + .endArray() + 
.endObject() + ) + ) { + MultiGetRequest multiGetRequest = new MultiGetRequest(); + multiGetRequest.add(randomAlphaOfLength(5), null, FetchSourceContext.FETCH_SOURCE, null, parser, true); + assertEquals(2, multiGetRequest.getItems().size()); + } } public void testXContentSerialization() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java index e6efc00209ba5..6f5841f3d2a03 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/GetPipelineResponseTests.java @@ -62,11 +62,15 @@ public void testXContentDeserialization() throws IOException { Map pipelinesMap = createPipelineConfigMap(); GetPipelineResponse response = new GetPipelineResponse(new ArrayList<>(pipelinesMap.values())); XContentBuilder builder = response.toXContent(getRandomXContentBuilder(), ToXContent.EMPTY_PARAMS); - XContentParser parser = builder.generator() - .contentType() - .xContent() - .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()); - GetPipelineResponse parsedResponse = GetPipelineResponse.fromXContent(parser); + GetPipelineResponse parsedResponse; + try ( + XContentParser parser = builder.generator() + .contentType() + .xContent() + .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput()) + ) { + parsedResponse = GetPipelineResponse.fromXContent(parser); + } List actualPipelines = response.pipelines(); List parsedPipelines = parsedResponse.pipelines(); assertEquals(actualPipelines.size(), parsedPipelines.size()); diff --git a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java index 7f5b5f7716f3e..8cbcf4962e156 100644 --- a/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/AbstractSearchAsyncActionTests.java @@ -18,10 +18,10 @@ import org.elasticsearch.index.Index; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchPhaseResult; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.internal.AliasFilter; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; @@ -194,7 +194,10 @@ public void testSendSearchResponseDisallowPartialFailures() { new IllegalArgumentException() ); } - action.sendSearchResponse(InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, phaseResults.results); + action.sendSearchResponse( + new SearchResponseSections(SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1), + phaseResults.results + ); assertThat(exception.get(), instanceOf(SearchPhaseExecutionException.class)); SearchPhaseExecutionException searchPhaseExecutionException = (SearchPhaseExecutionException) exception.get(); assertEquals(0, searchPhaseExecutionException.getSuppressed().length); diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java 
diff --git a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
index f8a22ec04fb15..4cac4a8a0445d 100644
--- a/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/ExpandSearchPhaseTests.java
@@ -20,7 +20,6 @@
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matchers;
 
@@ -89,16 +88,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
             List<MultiSearchResponse.Item> mSearchResponses = new ArrayList<>(numInnerHits);
             for (int innerHitNum = 0; innerHitNum < numInnerHits; innerHitNum++) {
-                InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
-                    collapsedHits.get(innerHitNum),
-                    null,
-                    null,
-                    null,
-                    false,
-                    null,
-                    1
+                mockSearchPhaseContext.sendSearchResponse(
+                    new SearchResponseSections(collapsedHits.get(innerHitNum), null, null, false, null, null, 1),
+                    null
                 );
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
                 mSearchResponses.add(new MultiSearchResponse.Item(mockSearchPhaseContext.searchResponse.get(), null));
             }
 
@@ -112,11 +105,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
         SearchHit hit = new SearchHit(1, "ID");
         hit.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue)));
         SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
-        InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, () -> new SearchPhase("test") {
+        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
             @Override
             public void run() {
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
+                mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null);
             }
         });
 
@@ -154,9 +146,14 @@ public void testFailOneItemFailsEntirePhase() throws IOException {
 
             @Override
            void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) {
                 assertTrue(executedMultiSearch.compareAndSet(false, true));
-                InternalSearchResponse internalSearchResponse = new InternalSearchResponse(collapsedHits, null, null, null, false, null, 1);
                 SearchResponse searchResponse = new SearchResponse(
-                    internalSearchResponse,
+                    collapsedHits,
+                    null,
+                    null,
+                    false,
+                    null,
+                    null,
+                    1,
                     null,
                     1,
                     1,
@@ -182,11 +179,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
         SearchHit hit2 = new SearchHit(2, "ID2");
         hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(collapseValue)));
         SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
-        InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, () -> new SearchPhase("test") {
+        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
             @Override
             public void run() {
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
+                mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null);
             }
         });
         phase.run();
@@ -210,11 +206,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
         SearchHit hit2 = new SearchHit(2, "ID2");
         hit2.setDocumentField("someField", new DocumentField("someField", Collections.singletonList(null)));
         SearchHits hits = new SearchHits(new SearchHit[] { hit1, hit2 }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0F);
-        InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, () -> new SearchPhase("test") {
+        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
             @Override
             public void run() {
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
+                mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null);
             }
         });
         phase.run();
@@ -238,11 +233,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
         );
 
         SearchHits hits = new SearchHits(new SearchHit[0], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
-        InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, () -> new SearchPhase("test") {
+        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
             @Override
             public void run() {
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
+                mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null);
             }
         });
         phase.run();
@@ -281,11 +275,10 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
             .routing("baz");
 
         SearchHits hits = new SearchHits(new SearchHit[0], new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f);
-        InternalSearchResponse internalSearchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, internalSearchResponse, () -> new SearchPhase("test") {
+        ExpandSearchPhase phase = new ExpandSearchPhase(mockSearchPhaseContext, hits, () -> new SearchPhase("test") {
             @Override
-            public void run() throws IOException {
-                mockSearchPhaseContext.sendSearchResponse(internalSearchResponse, null);
+            public void run() {
+                mockSearchPhaseContext.sendSearchResponse(new SearchResponseSections(hits, null, null, false, null, null, 1), null);
             }
         });
         phase.run();
diff --git a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
index 38409752c7e7d..01a71fe00b2fe 100644
--- a/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/FetchLookupFieldsPhaseTests.java
@@ -17,7 +17,6 @@
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.fetch.subphase.FieldAndFormat;
 import org.elasticsearch.search.fetch.subphase.LookupField;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.test.ESTestCase;
 
 import java.util.List;
@@ -46,8 +45,11 @@ void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionL
                 searchHits[i] = SearchHitTests.createTestItem(randomBoolean(), randomBoolean());
             }
             SearchHits hits = new SearchHits(searchHits, new TotalHits(numHits, TotalHits.Relation.EQUAL_TO), 1.0f);
-            InternalSearchResponse searchResponse = new InternalSearchResponse(hits, null, null, null, false, null, 1);
-            FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(searchPhaseContext, searchResponse, null);
+            FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(
+                searchPhaseContext,
+                new SearchResponseSections(hits, null, null, false, null, null, 1),
+                null
+            );
             phase.run();
             searchPhaseContext.assertNoFailure();
             assertNotNull(searchPhaseContext.searchResponse.get());
@@ -95,18 +97,15 @@ void sendExecuteMultiSearch(
                     } else {
                         searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 1.0f);
                     }
-                    InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
-                        searchHits,
-                        null,
-                        null,
-                        null,
-                        false,
-                        null,
-                        1
-                    );
                     responses[i] = new MultiSearchResponse.Item(
                         new SearchResponse(
-                            internalSearchResponse,
+                            searchHits,
+                            null,
+                            null,
+                            false,
+                            null,
+                            null,
+                            1,
                             null,
                             1,
                             1,
@@ -174,8 +173,11 @@ void sendExecuteMultiSearch(
             );
         }
         SearchHits searchHits = new SearchHits(new SearchHit[] { leftHit0, leftHit1 }, new TotalHits(2, TotalHits.Relation.EQUAL_TO), 1.0f);
-        InternalSearchResponse searchResponse = new InternalSearchResponse(searchHits, null, null, null, false, null, 1);
-        FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(searchPhaseContext, searchResponse, null);
+        FetchLookupFieldsPhase phase = new FetchLookupFieldsPhase(
+            searchPhaseContext,
+            new SearchResponseSections(searchHits, null, null, false, null, null, 1),
+            null
+        );
         phase.run();
         assertTrue(requestSent.get());
         searchPhaseContext.assertNoFailure();
diff --git a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
index 71156517b0306..df3d4d76a14ee 100644
--- a/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
+++ b/server/src/test/java/org/elasticsearch/action/search/MockSearchPhaseContext.java
@@ -16,7 +16,6 @@
 import org.elasticsearch.core.Releasables;
 import org.elasticsearch.search.SearchPhaseResult;
 import org.elasticsearch.search.SearchShardTarget;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.search.internal.ShardSearchRequest;
 import org.elasticsearch.transport.Transport;
@@ -83,7 +82,7 @@ public OriginalIndices getOriginalIndices(int shardIndex) {
     }
 
     @Override
-    public void sendSearchResponse(InternalSearchResponse internalSearchResponse, AtomicArray<SearchPhaseResult> queryResults) {
+    public void sendSearchResponse(SearchResponseSections internalSearchResponse, AtomicArray<SearchPhaseResult> queryResults) {
         String scrollId = getRequest().scroll() != null ? TransportSearchHelper.buildScrollId(queryResults) : null;
         String searchContextId = getRequest().pointInTimeBuilder() != null ? TransportSearchHelper.buildScrollId(queryResults) : null;
         searchResponse.set(
diff --git a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
index 9b1ed6eee1028..f682e75b89a07 100644
--- a/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/MultiSearchActionTookTests.java
@@ -19,7 +19,7 @@
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.concurrent.AtomicArray;
-import org.elasticsearch.search.internal.InternalSearchResponse;
+import org.elasticsearch.search.SearchResponseUtils;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.TestThreadPool;
@@ -149,8 +149,7 @@ public void search(final SearchRequest request, final ActionListener<SearchResponse> listener) {
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchAsyncActionTests.java
         final Set<ShardSearchContextId> queried = new HashSet<>();
 
         TestSearchResponse() {
-            super(InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, Clusters.EMPTY, null);
+            super(
+                SearchHits.EMPTY_WITH_TOTAL_HITS,
+                null,
+                null,
+                false,
+                null,
+                null,
+                1,
+                null,
+                0,
+                0,
+                0,
+                0L,
+                ShardSearchFailure.EMPTY_ARRAY,
+                Clusters.EMPTY,
+                null
+            );
         }
     }
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java
index 90ac90738837d..32091780484fa 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchContextIdTests.java
@@ -85,5 +85,11 @@ public void testEncode() {
         assertThat(node3.getNode(), equalTo("node_3"));
         assertThat(node3.getSearchContextId().getId(), equalTo(42L));
         assertThat(node3.getSearchContextId().getSessionId(), equalTo("c"));
+
+        final String[] indices = SearchContextId.decodeIndices(id);
+        assertThat(indices.length, equalTo(3));
+        assertThat(indices[0], equalTo("cluster_x:idx"));
+        assertThat(indices[1], equalTo("cluster_y:idy"));
+        assertThat(indices[2], equalTo("idy"));
     }
 }
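The new assertions above exercise SearchContextId.decodeIndices, which recovers the index names (cluster-qualified for remote indices) baked into an encoded point-in-time id. Usage, taken directly from the test, where id is the encoded context id:

    // Yields "cluster_x:idx", "cluster_y:idy", "idy" for the id built in testEncode
    String[] indices = SearchContextId.decodeIndices(id);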
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
index cd86a2e4f55d6..bfd949606c188 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchPhaseControllerTests.java
@@ -50,7 +50,6 @@
 import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.fetch.FetchSearchResult;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.search.profile.ProfileResult;
@@ -294,7 +293,7 @@ public void testMerge() {
             profile
         );
         try {
-            InternalSearchResponse mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults);
+            SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults);
             if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                 assertNull(mergedResponse.hits.getTotalHits());
             } else {
@@ -412,7 +411,7 @@ protected boolean lessThan(RankDoc a, RankDoc b) {
             false
         );
         try {
-            InternalSearchResponse mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults);
+            SearchResponseSections mergedResponse = SearchPhaseController.merge(false, reducedQueryPhase, fetchResults);
             if (trackTotalHits == SearchContext.TRACK_TOTAL_HITS_DISABLED) {
                 assertNull(mergedResponse.hits.getTotalHits());
             } else {
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
index 0c8496081ff19..8c0ffeabf0ea6 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchRequestTests.java
@@ -457,6 +457,42 @@ public void testValidate() throws IOException {
             assertEquals(1, validationErrors.validationErrors().size());
             assertEquals("[rank] requires [explain] is [false]", validationErrors.validationErrors().get(0));
         }
+        {
+            SearchRequest searchRequest = new SearchRequest("test").source(
+                new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(""))
+            );
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals(
+                "[indices] cannot be used with point in time. Do not specify any index with point in time.",
+                validationErrors.validationErrors().get(0)
+            );
+        }
+        {
+            SearchRequest searchRequest = new SearchRequest().indicesOptions(IndicesOptions.LENIENT_EXPAND_OPEN_CLOSED)
+                .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("")));
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[indicesOptions] cannot be used with point in time", validationErrors.validationErrors().get(0));
+        }
+        {
+            SearchRequest searchRequest = new SearchRequest().routing("route1")
+                .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("")));
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[routing] cannot be used with point in time", validationErrors.validationErrors().get(0));
+        }
+        {
+            SearchRequest searchRequest = new SearchRequest().preference("pref1")
+                .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("")));
+            ActionRequestValidationException validationErrors = searchRequest.validate();
+            assertNotNull(validationErrors);
+            assertEquals(1, validationErrors.validationErrors().size());
+            assertEquals("[preference] cannot be used with point in time", validationErrors.validationErrors().get(0));
+        }
     }
 
     public void testCopyConstructor() throws IOException {
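The four new validation blocks above pin down the rule that a point-in-time search must not also carry indices, indicesOptions, routing, or preference. A request that passes validation therefore sets only the PIT on the source (sketch; pitId stands in for a real encoded PIT id):

    // Valid: no indices, routing, preference, or custom indicesOptions on the request itself.
    SearchRequest ok = new SearchRequest()
        .source(new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder(pitId)));
    assert ok.validate() == null;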
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
index e57b204df0836..0f80572fdb7bc 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseMergerTests.java
@@ -21,6 +21,7 @@
 import org.elasticsearch.search.DocValueFormat;
 import org.elasticsearch.search.SearchHit;
 import org.elasticsearch.search.SearchHits;
+import org.elasticsearch.search.SearchResponseUtils;
 import org.elasticsearch.search.SearchShardTarget;
 import org.elasticsearch.search.aggregations.AggregatorFactories;
 import org.elasticsearch.search.aggregations.InternalAggregations;
@@ -29,7 +30,6 @@
 import org.elasticsearch.search.aggregations.bucket.range.Range;
 import org.elasticsearch.search.aggregations.metrics.Max;
 import org.elasticsearch.search.aggregations.metrics.MaxAggregationBuilder;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.profile.SearchProfileResults;
 import org.elasticsearch.search.profile.SearchProfileResultsTests;
@@ -108,8 +108,7 @@ public void testMergeTookInMillis() throws InterruptedException {
             )
         ) {
             for (int i = 0; i < numResponses; i++) {
-                SearchResponse searchResponse = new SearchResponse(
-                    InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+                SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits(
                     null,
                     1,
                     1,
@@ -169,8 +168,7 @@ public void testMergeShardFailures() throws InterruptedException {
                 shardSearchFailures[j] = failure;
                 priorityQueue.add(Tuple.tuple(searchShardTarget, failure));
             }
-            SearchResponse searchResponse = new SearchResponse(
-                InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+            SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits(
                 null,
                 1,
                 1,
@@ -231,8 +229,7 @@ public void testMergeShardFailuresNullShardTarget() throws InterruptedException
                 shardSearchFailures[j] = failure;
                 priorityQueue.add(Tuple.tuple(shardId, failure));
             }
-            SearchResponse searchResponse = new SearchResponse(
-                InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+            SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits(
                 null,
                 1,
                 1,
@@ -291,8 +288,7 @@ public void testMergeShardFailuresNullShardId() throws InterruptedException {
                 shardSearchFailures[j] = shardSearchFailure;
                 expectedFailures.add(shardSearchFailure);
             }
-            SearchResponse searchResponse = new SearchResponse(
-                InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+            SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits(
                 null,
                 1,
                 1,
@@ -335,9 +331,14 @@ public void testMergeProfileResults() throws InterruptedException {
             SearchProfileResults profile = SearchProfileResultsTests.createTestItem();
             expectedProfile.putAll(profile.getShardResults());
             SearchHits searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN);
-            InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, null, profile, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(
-                internalSearchResponse,
+                searchHits,
+                null,
+                null,
+                false,
+                null,
+                profile,
+                1,
                 null,
                 1,
                 1,
@@ -408,9 +409,14 @@ public void testMergeCompletionSuggestions() throws InterruptedException {
             suggestions.add(completionSuggestion);
             Suggest suggest = new Suggest(suggestions);
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
-            InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(
-                internalSearchResponse,
+                searchHits,
+                null,
+                suggest,
+                false,
+                null,
+                null,
+                1,
                 null,
                 1,
                 1,
@@ -489,9 +495,14 @@ public void testMergeCompletionSuggestionsTieBreak() throws InterruptedException
             suggestions.add(completionSuggestion);
             Suggest suggest = new Suggest(suggestions);
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
-            InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, null, suggest, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(
-                internalSearchResponse,
+                searchHits,
+                null,
+                suggest,
+                false,
+                null,
+                null,
+                1,
                 null,
                 1,
                 1,
@@ -566,9 +577,14 @@ public void testMergeEmptyFormat() throws InterruptedException {
         ) {
             for (Max max : Arrays.asList(max1, max2)) {
                 InternalAggregations aggs = InternalAggregations.from(Arrays.asList(max));
-                InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1);
                 SearchResponse searchResponse = new SearchResponse(
-                    internalSearchResponse,
+                    searchHits,
+                    aggs,
+                    null,
+                    false,
+                    null,
+                    null,
+                    1,
                     null,
                     1,
                     1,
@@ -630,9 +646,14 @@ public void testMergeAggs() throws InterruptedException {
             InternalDateRange range = factory.create(rangeAggName, singletonList(bucket), DocValueFormat.RAW, false, emptyMap());
             InternalAggregations aggs = InternalAggregations.from(Arrays.asList(range, max));
             SearchHits searchHits = new SearchHits(new SearchHit[0], null, Float.NaN);
-            InternalSearchResponse internalSearchResponse = new InternalSearchResponse(searchHits, aggs, null, null, false, null, 1);
             SearchResponse searchResponse = new SearchResponse(
-                internalSearchResponse,
+                searchHits,
+                aggs,
+                null,
+                false,
+                null,
+                null,
+                1,
                 null,
                 1,
                 1,
@@ -787,18 +808,14 @@ public void testMergeSearchHits() throws InterruptedException {
             Boolean terminatedEarly = frequently() ? null : true;
             expectedTerminatedEarly = expectedTerminatedEarly == null ? terminatedEarly : expectedTerminatedEarly;
 
-            InternalSearchResponse internalSearchResponse = new InternalSearchResponse(
+            SearchResponse searchResponse = new SearchResponse(
                 searchHits,
                 null,
                 null,
-                null,
                 timedOut,
                 terminatedEarly,
-                numReducePhases
-            );
-
-            SearchResponse searchResponse = new SearchResponse(
-                internalSearchResponse,
+                null,
+                numReducePhases,
                 null,
                 total,
                 successful,
@@ -937,9 +954,14 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
             null,
             null
         );
-        InternalSearchResponse response = new InternalSearchResponse(searchHits, null, null, null, false, false, 1);
         SearchResponse searchResponse = new SearchResponse(
-            response,
+            searchHits,
+            null,
+            null,
+            false,
+            false,
+            null,
+            1,
             null,
             1,
             1,
@@ -963,9 +985,14 @@ public void testMergeEmptySearchHitsWithNonEmpty() {
             null,
             null
         );
-        InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1);
         SearchResponse searchResponse = new SearchResponse(
-            response,
+            empty,
+            null,
+            null,
+            false,
+            false,
+            null,
+            1,
             null,
             1,
             1,
@@ -1015,9 +1042,14 @@ public void testMergeOnlyEmptyHits() {
             expectedTotalHits = new TotalHits(Math.min(previousValue + totalHits.value, trackTotalHitsUpTo), totalHitsRelation);
         }
         SearchHits empty = new SearchHits(new SearchHit[0], totalHits, Float.NaN, null, null, null);
-        InternalSearchResponse response = new InternalSearchResponse(empty, null, null, null, false, false, 1);
         SearchResponse searchResponse = new SearchResponse(
-            response,
+            empty,
+            null,
+            null,
+            false,
+            false,
+            null,
+            1,
             null,
             1,
             1,
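SearchResponseUtils.emptyWithTotalHits replaces the new SearchResponse(InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, ...) boilerplate used above. Inferred from its call sites in this diff, the helper plausibly reduces to the following (a sketch, not necessarily the actual implementation in org.elasticsearch.search.SearchResponseUtils):

    // Hypothetical body, assembled from how the helper is called in these tests:
    // empty hits plus the same trailing arguments the old constructor took.
    static SearchResponse emptyWithTotalHits(
        String scrollId,
        int totalShards,
        int successfulShards,
        int skippedShards,
        long tookInMillis,
        ShardSearchFailure[] shardFailures,
        SearchResponse.Clusters clusters
    ) {
        return new SearchResponse(
            SearchHits.EMPTY_WITH_TOTAL_HITS, null, null, false, null, null, 1,
            scrollId, totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, clusters, null
        );
    }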
diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
index b45a04922c187..ef759279e095f 100644
--- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java
@@ -25,9 +25,9 @@
 import org.elasticsearch.search.SearchHits;
 import org.elasticsearch.search.SearchHitsTests;
 import org.elasticsearch.search.SearchModule;
+import org.elasticsearch.search.SearchResponseUtils;
 import org.elasticsearch.search.aggregations.AggregationsTests;
 import org.elasticsearch.search.aggregations.InternalAggregations;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.profile.SearchProfileResults;
 import org.elasticsearch.search.profile.SearchProfileResultsTests;
 import org.elasticsearch.search.suggest.Suggest;
@@ -107,42 +107,44 @@ private SearchResponse createTestItem(boolean minimal, ShardSearchFailure... sha
         int totalShards = randomIntBetween(1, Integer.MAX_VALUE);
         int successfulShards = randomIntBetween(0, totalShards);
         int skippedShards = randomIntBetween(0, totalShards);
-        InternalSearchResponse internalSearchResponse;
+        SearchResponse.Clusters clusters;
+        if (minimal) {
+            clusters = randomSimpleClusters();
+        } else {
+            clusters = randomClusters();
+        }
         if (minimal == false) {
             SearchHits hits = SearchHitsTests.createTestItem(true, true);
             InternalAggregations aggregations = aggregationsTests.createTestInstance();
             Suggest suggest = SuggestTests.createTestItem();
             SearchProfileResults profileResults = SearchProfileResultsTests.createTestItem();
-            internalSearchResponse = new InternalSearchResponse(
+            return new SearchResponse(
                 hits,
                 aggregations,
                 suggest,
-                profileResults,
                 timedOut,
                 terminatedEarly,
-                numReducePhases
+                profileResults,
+                numReducePhases,
+                null,
+                totalShards,
+                successfulShards,
+                skippedShards,
+                tookInMillis,
+                shardSearchFailures,
+                clusters
             );
         } else {
-            internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS;
-        }
-
-        SearchResponse.Clusters clusters;
-        if (minimal) {
-            clusters = randomSimpleClusters();
-        } else {
-            clusters = randomClusters();
+            return SearchResponseUtils.emptyWithTotalHits(
+                null,
+                totalShards,
+                successfulShards,
+                skippedShards,
+                tookInMillis,
+                shardSearchFailures,
+                clusters
+            );
         }
-
-        return new SearchResponse(
-            internalSearchResponse,
-            null,
-            totalShards,
-            successfulShards,
-            skippedShards,
-            tookInMillis,
-            shardSearchFailures,
-            clusters
-        );
     }
 
     /**
@@ -381,15 +383,13 @@ public void testToXContent() throws IOException {
         SearchHit[] hits = new SearchHit[] { hit };
         {
             SearchResponse response = new SearchResponse(
-                new InternalSearchResponse(
-                    new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
-                    null,
-                    null,
-                    null,
-                    false,
-                    null,
-                    1
-                ),
+                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
+                null,
+                null,
+                false,
+                null,
+                null,
+                1,
                 null,
                 0,
                 0,
@@ -425,15 +425,13 @@ public void testToXContent() throws IOException {
         }
         {
             SearchResponse response = new SearchResponse(
-                new InternalSearchResponse(
-                    new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
-                    null,
-                    null,
-                    null,
-                    false,
-                    null,
-                    1
-                ),
+                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
+                null,
+                null,
+                false,
+                null,
+                null,
+                1,
                 null,
                 0,
                 0,
@@ -477,15 +475,13 @@ public void testToXContent() throws IOException {
         }
         {
             SearchResponse response = new SearchResponse(
-                new InternalSearchResponse(
-                    new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
-                    null,
-                    null,
-                    null,
-                    false,
-                    null,
-                    1
-                ),
+                new SearchHits(hits, new TotalHits(100, TotalHits.Relation.EQUAL_TO), 1.5f),
+                null,
+                null,
+                false,
+                null,
+                null,
+                1,
                 null,
                 20,
                 9,
@@ -654,8 +650,7 @@ public void testSerialization() throws IOException {
     }
 
     public void testToXContentEmptyClusters() throws IOException {
-        SearchResponse searchResponse = new SearchResponse(
-            InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+        SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits(
             null,
             1,
            1,
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
index 1097174628e58..fb27d824417b1 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportMultiSearchActionTests.java
@@ -23,7 +23,7 @@
 import org.elasticsearch.common.Randomness;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.search.internal.InternalSearchResponse;
+import org.elasticsearch.search.SearchResponseUtils;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -166,8 +166,7 @@ public void search(final SearchRequest request, final ActionListener<SearchResponse> listener) {
                     counter.decrementAndGet();
-                    var response = new SearchResponse(
-                        InternalSearchResponse.EMPTY_WITH_TOTAL_HITS,
+                    var response = SearchResponseUtils.emptyWithTotalHits(
                         null,
                         0,
                         0,
diff --git a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
index 6230a24a0768f..9707df1a7dfd0 100644
--- a/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
+++ b/server/src/test/java/org/elasticsearch/action/search/TransportSearchActionTests.java
@@ -69,7 +69,6 @@
 import org.elasticsearch.search.builder.SearchSourceBuilder;
 import org.elasticsearch.search.collapse.CollapseBuilder;
 import org.elasticsearch.search.internal.AliasFilter;
-import org.elasticsearch.search.internal.InternalSearchResponse;
 import org.elasticsearch.search.internal.SearchContext;
 import org.elasticsearch.search.internal.ShardSearchContextId;
 import org.elasticsearch.search.sort.SortBuilders;
@@ -77,6 +76,7 @@
 import org.elasticsearch.search.suggest.term.TermSuggestionBuilder;
 import org.elasticsearch.search.vectors.KnnSearchBuilder;
 import org.elasticsearch.tasks.TaskId;
+import org.elasticsearch.telemetry.TelemetryProvider;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.test.TransportVersionUtils;
 import org.elasticsearch.test.transport.MockTransportService;
@@ -480,16 +480,23 @@ private MockTransportService[] startTransport(
     }
 
     private static SearchResponse emptySearchResponse() {
-        InternalSearchResponse response = new InternalSearchResponse(
+        return new SearchResponse(
             new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN),
             InternalAggregations.EMPTY,
             null,
-            null,
             false,
             null,
-            1
+            null,
+            1,
+            null,
+            1,
+            1,
+            0,
+            100,
+            ShardSearchFailure.EMPTY_ARRAY,
+            SearchResponse.Clusters.EMPTY,
+            null
         );
-        return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY, null);
     }
 
     public void testCCSRemoteReduceMergeFails() throws Exception {
@@ -1594,7 +1601,8 @@ protected void doWriteTo(StreamOutput out) throws IOException {
             actionFilters,
             null,
             null,
-            null
+            null,
+            new SearchTransportAPMMetrics(TelemetryProvider.NOOP.getMeterRegistry())
         );
 
         CountDownLatch latch = new CountDownLatch(1);
diff --git a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
index abc482a34a070..2ca914eb23c61 100644
--- a/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/PlainActionFutureTests.java
@@ -34,13 +34,11 @@ public void onResponse(Object value) {
 
         // test all possible methods that can be interrupted
         final Runnable runnable = () -> {
-            final int method = randomIntBetween(0, 4);
+            final int method = randomIntBetween(0, 2);
             switch (method) {
                 case 0 -> future.actionGet();
-                case 1 -> future.actionGet("30s");
-                case 2 -> future.actionGet(30000);
-                case 3 -> future.actionGet(TimeValue.timeValueSeconds(30));
-                case 4 -> future.actionGet(30, TimeUnit.SECONDS);
+                case 1 -> future.actionGet(TimeValue.timeValueSeconds(30));
+                case 2 -> future.actionGet(30, TimeUnit.SECONDS);
                 default -> throw new AssertionError(method);
             }
         };
diff --git a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
index a787a50798e05..8bda62b91bc7e 100644
--- a/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/replication/BroadcastReplicationTests.java
@@ -305,7 +305,7 @@ public BaseBroadcastResponse executeAndAssertImmediateResponse(
     ) {
         PlainActionFuture<BaseBroadcastResponse> response = new PlainActionFuture<>();
         ActionTestUtils.execute(broadcastAction, null, request, response);
-        return response.actionGet("5s");
+        return response.actionGet(5, TimeUnit.SECONDS);
     }
 
     private void assertBroadcastResponse(int total, int successful, int failed, BaseBroadcastResponse response, Class<? extends Exception> exceptionClass) {
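PlainActionFutureTests drops the string and raw-millisecond timeout overloads of actionGet, leaving the no-timeout, TimeValue, and TimeUnit variants, and BroadcastReplicationTests migrates accordingly. The mechanical migration, as exercised by the two hunks above:

    response.actionGet("5s");                           // before: parsed-string overload, removed
    response.actionGet(30000);                          // before: raw-millis overload, removed
    response.actionGet(TimeValue.timeValueSeconds(30)); // after
    response.actionGet(5, TimeUnit.SECONDS);            // after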
diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
index 0df492b080254..be8255cd766c8 100644
--- a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
+++ b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java
@@ -294,17 +294,18 @@ public void testStreamRequestLegacyVersion() throws IOException {
 
     public void testMultiParser() throws Exception {
         byte[] bytes = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest1.json");
-        XContentParser data = createParser(JsonXContent.jsonXContent, bytes);
-        MultiTermVectorsRequest request = new MultiTermVectorsRequest();
-        request.add(new TermVectorsRequest(), data);
-        checkParsedParameters(request);
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, bytes)) {
+            MultiTermVectorsRequest request = new MultiTermVectorsRequest();
+            request.add(new TermVectorsRequest(), parser);
+            checkParsedParameters(request);
+        }
 
         bytes = StreamsUtils.copyToBytesFromClasspath("/org/elasticsearch/action/termvectors/multiRequest2.json");
-        data = createParser(JsonXContent.jsonXContent, new BytesArray(bytes));
-        request = new MultiTermVectorsRequest();
-        request.add(new TermVectorsRequest(), data);
-
-        checkParsedParameters(request);
+        try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray(bytes))) {
+            MultiTermVectorsRequest request = new MultiTermVectorsRequest();
+            request.add(new TermVectorsRequest(), parser);
+            checkParsedParameters(request);
+        }
     }
 
     void checkParsedParameters(MultiTermVectorsRequest request) {
diff --git a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
index bd789891f2330..735ae41558240 100644
--- a/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/update/UpdateRequestTests.java
@@ -120,7 +120,9 @@ public void setUp() throws Exception {
     public void testFromXContent() throws Exception {
         UpdateRequest request = new UpdateRequest("test", "1");
         // simple script
-        request.fromXContent(createParser(XContentFactory.jsonBuilder().startObject().field("script", "script1").endObject()));
+        try (var parser = createParser(XContentFactory.jsonBuilder().startObject().field("script", "script1").endObject())) {
+            request.fromXContent(parser);
+        }
         Script script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -130,11 +132,13 @@ public void testFromXContent() throws Exception {
         assertThat(params, equalTo(emptyMap()));
 
         // simple verbose script
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder().startObject().startObject("script").field("source", "script1").endObject().endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -145,8 +149,8 @@ public void testFromXContent() throws Exception {
 
         // script with params
         request = new UpdateRequest("test", "1");
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("script")
@@ -157,7 +161,9 @@ public void testFromXContent() throws Exception {
                     .endObject()
                     .endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -169,8 +175,8 @@ public void testFromXContent() throws Exception {
         assertThat(params.get("param1").toString(), equalTo("value1"));
 
         request = new UpdateRequest("test", "1");
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("script")
@@ -181,7 +187,9 @@ public void testFromXContent() throws Exception {
                     .endObject()
                     .endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -194,8 +202,8 @@ public void testFromXContent() throws Exception {
 
         // script with params and upsert
         request = new UpdateRequest("test", "1");
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("script")
@@ -212,7 +220,9 @@ public void testFromXContent() throws Exception {
                     .endObject()
                     .endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -231,8 +241,8 @@ public void testFromXContent() throws Exception {
         assertThat(((Map) upsertDoc.get("compound")).get("field2").toString(), equalTo("value2"));
 
         request = new UpdateRequest("test", "1");
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("upsert")
@@ -249,7 +259,9 @@ public void testFromXContent() throws Exception {
                     .endObject()
                     .endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         script = request.script();
         assertThat(script, notNullValue());
         assertThat(script.getIdOrCode(), equalTo("script1"));
@@ -265,8 +277,8 @@ public void testFromXContent() throws Exception {
 
         // script with doc
         request = new UpdateRequest("test", "1");
-        request.fromXContent(
-            createParser(
+        try (
+            var parser = createParser(
                 XContentFactory.jsonBuilder()
                     .startObject()
                     .startObject("doc")
@@ -277,7 +289,9 @@ public void testFromXContent() throws Exception {
                     .endObject()
                     .endObject()
             )
-        );
+        ) {
+            request.fromXContent(parser);
+        }
         Map<String, Object> doc = request.doc().sourceAsMap();
         assertThat(doc.get("field1").toString(), equalTo("value1"));
         assertThat(((Map) doc.get("compound")).get("field2").toString(), equalTo("value2"));
@@ -285,23 +299,30 @@ public void testFromXContent() throws Exception {
 
     public void testUnknownFieldParsing() throws Exception {
         UpdateRequest request = new UpdateRequest("test", "1");
-        XContentParser contentParser = createParser(XContentFactory.jsonBuilder().startObject().field("unknown_field", "test").endObject());
-
-        XContentParseException ex = expectThrows(XContentParseException.class, () -> request.fromXContent(contentParser));
-        assertEquals("[1:2] [UpdateRequest] unknown field [unknown_field]", ex.getMessage());
+        try (
+            XContentParser contentParser = createParser(
+                XContentFactory.jsonBuilder().startObject().field("unknown_field", "test").endObject()
+            )
+        ) {
+            XContentParseException ex = expectThrows(XContentParseException.class, () -> request.fromXContent(contentParser));
+            assertEquals("[1:2] [UpdateRequest] unknown field [unknown_field]", ex.getMessage());
+        }
 
         UpdateRequest request2 = new UpdateRequest("test", "1");
-        XContentParser unknownObject = createParser(
-            XContentFactory.jsonBuilder()
-                .startObject()
-                .field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'")
-                .startObject("params")
-                .field("count", 1)
-                .endObject()
-                .endObject()
-        );
-        ex = expectThrows(XContentParseException.class, () -> request2.fromXContent(unknownObject));
-        assertEquals("[1:76] [UpdateRequest] unknown field [params]", ex.getMessage());
+        try (
+            XContentParser unknownObject = createParser(
+                XContentFactory.jsonBuilder()
+                    .startObject()
+                    .field("script", "ctx.op = ctx._source.views == params.count ? 'delete' : 'none'")
+                    .startObject("params")
+                    .field("count", 1)
+                    .endObject()
+                    .endObject()
+            )
+        ) {
+            XContentParseException ex = expectThrows(XContentParseException.class, () -> request2.fromXContent(unknownObject));
+            assertEquals("[1:76] [UpdateRequest] unknown field [params]", ex.getMessage());
+        }
     }
 
     public void testFetchSourceParsing() throws Exception {
@@ -543,9 +564,10 @@ public void testNoopDetection() throws Exception {
         ShardId shardId = new ShardId("test", "", 0);
         GetResult getResult = new GetResult("test", "1", 0, 1, 0, true, new BytesArray("{\"body\": \"foo\"}"), null, null);
 
-        UpdateRequest request = new UpdateRequest("test", "1").fromXContent(
-            createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"foo\"}}"))
-        );
+        UpdateRequest request;
+        try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"foo\"}}"))) {
+            request = new UpdateRequest("test", "1").fromXContent(parser);
+        }
 
         UpdateHelper.Result result = UpdateHelper.prepareUpdateIndexRequest(shardId, request, getResult, true);
 
@@ -558,15 +580,15 @@ public void testNoopDetection() throws Exception {
         assertThat(result.getResponseResult(), equalTo(DocWriteResponse.Result.UPDATED));
         assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("foo"));
 
-        // Change the request to be a different doc
-        request = new UpdateRequest("test", "1").fromXContent(
-            createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}"))
-        );
-        result = UpdateHelper.prepareUpdateIndexRequest(shardId, request, getResult, true);
+        try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}"))) {
+            // Change the request to be a different doc
+            request = new UpdateRequest("test", "1").fromXContent(parser);
+            result = UpdateHelper.prepareUpdateIndexRequest(shardId, request, getResult, true);
 
-        assertThat(result.action(), instanceOf(IndexRequest.class));
-        assertThat(result.getResponseResult(), equalTo(DocWriteResponse.Result.UPDATED));
-        assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("bar"));
+            assertThat(result.action(), instanceOf(IndexRequest.class));
+            assertThat(result.getResponseResult(), equalTo(DocWriteResponse.Result.UPDATED));
+            assertThat(result.updatedSourceAsMap().get("body").toString(), equalTo("bar"));
+        }
     }
 
@@ -614,11 +636,11 @@ public void testToString() throws IOException {
         assertThat(request.toString(), equalTo("""
             update {[test][1], doc_as_upsert[false], script[Script{type=inline, lang='mock', idOrCode='ctx._source.body = "foo"', \
             options={}, params={}}], scripted_upsert[false], detect_noop[true]}"""));
 
-        request = new UpdateRequest("test", "1").fromXContent(
-            createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}"))
-        );
-        assertThat(request.toString(), equalTo("""
-            update {[test][1], doc_as_upsert[false], doc[index {[null][null], source[{"body":"bar"}]}], \
-            scripted_upsert[false], detect_noop[true]}"""));
+        try (var parser = createParser(JsonXContent.jsonXContent, new BytesArray("{\"doc\": {\"body\": \"bar\"}}"))) {
+            request = new UpdateRequest("test", "1").fromXContent(parser);
+            assertThat(request.toString(), equalTo("""
+                update {[test][1], doc_as_upsert[false], doc[index {[null][null], source[{"body":"bar"}]}], \
+                scripted_upsert[false], detect_noop[true]}"""));
+        }
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java b/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
index 7b2795abfd62d..75439578448a4 100644
--- a/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/DiskUsageTests.java
@@ -13,7 +13,6 @@
 import org.elasticsearch.action.admin.indices.stats.ShardStats;
 import org.elasticsearch.cluster.node.DiscoveryNodeUtils;
 import org.elasticsearch.cluster.routing.RecoverySource.PeerRecoverySource;
-import org.elasticsearch.cluster.routing.RoutingTable;
 import org.elasticsearch.cluster.routing.ShardRouting;
 import org.elasticsearch.cluster.routing.ShardRoutingHelper;
 import org.elasticsearch.cluster.routing.UnassignedInfo;
@@ -137,14 +136,7 @@ public void testFillShardLevelInfo() {
         Map<String, Long> shardSizes = new HashMap<>();
         Map<ShardId, Long> shardDataSetSizes = new HashMap<>();
         Map<ClusterInfo.NodeAndShard, String> routingToPath = new HashMap<>();
-        InternalClusterInfoService.buildShardLevelInfo(
-            RoutingTable.EMPTY_ROUTING_TABLE,
-            stats,
-            shardSizes,
-            shardDataSetSizes,
-            routingToPath,
-            new HashMap<>()
-        );
+        InternalClusterInfoService.buildShardLevelInfo(stats, shardSizes, shardDataSetSizes, routingToPath, new HashMap<>());
 
         assertThat(
             shardSizes,
diff --git a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
index b9d1cb50444e3..18385b1d7ad44 100644
--- a/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/coordination/StableMasterHealthIndicatorServiceTests.java
@@ -312,8 +312,11 @@ private static StableMasterHealthIndicatorService createStableMasterHealthIndica
     private Map<String, Object> xContentToMap(ToXContent xcontent) throws IOException {
         XContentBuilder builder = XContentFactory.jsonBuilder();
         xcontent.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        XContentParser parser = XContentType.JSON.xContent()
-            .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput());
-        return parser.map();
+        try (
+            XContentParser parser = XContentType.JSON.xContent()
+                .createParser(xContentRegistry(), LoggingDeprecationHandler.INSTANCE, BytesReference.bytes(builder).streamInput())
+        ) {
+            return parser.map();
+        }
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
index e539087de7b8e..e8892278879b9 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexGraveyardTests.java
@@ -72,9 +72,10 @@ public void testXContent() throws IOException {
                 )
             );
         }
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        parser.nextToken(); // the beginning of the parser
-        assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard));
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+            parser.nextToken(); // the beginning of the parser
+            assertThat(IndexGraveyard.fromXContent(parser), equalTo(graveyard));
+        }
     }
 
     public void testChunking() {
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java
index 0680392ffb3f0..b4c9f670f66b6 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexMetadataTests.java
@@ -112,8 +112,10 @@ public void testIndexMetadataSerialization() throws IOException {
             builder.startObject();
             IndexMetadata.FORMAT.toXContent(builder, metadata);
             builder.endObject();
-            XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-            final IndexMetadata fromXContentMeta = IndexMetadata.fromXContent(parser);
+            final IndexMetadata fromXContentMeta;
+            try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+                fromXContentMeta = IndexMetadata.fromXContent(parser);
+            }
             assertEquals(
                 "expected: " + Strings.toString(metadata) + "\nactual : " + Strings.toString(fromXContentMeta),
                 metadata,
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ReservedStateMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ReservedStateMetadataTests.java
index 2383c0b513ead..46be49ad7111f 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/ReservedStateMetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ReservedStateMetadataTests.java
@@ -63,9 +63,10 @@ private void xContentTest(boolean addHandlers, boolean addErrors) throws IOExcep
         builder.startObject();
         meta.toXContent(builder, ToXContent.EMPTY_PARAMS);
         builder.endObject();
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        parser.nextToken(); // the beginning of the object
-        assertThat(ReservedStateMetadata.fromXContent(parser), equalTo(meta));
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+            parser.nextToken(); // the beginning of the object
+            assertThat(ReservedStateMetadata.fromXContent(parser), equalTo(meta));
+        }
     }
 
     public void testXContent() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
index cb681b57b58dd..e7f49bc773404 100644
--- a/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/metadata/ToAndFromJsonMetadataTests.java
@@ -138,7 +138,10 @@ public void testSimpleJsonFromAndTo() throws IOException {
         Metadata.FORMAT.toXContent(builder, metadata);
         builder.endObject();
 
-        Metadata parsedMetadata = Metadata.Builder.fromXContent(createParser(builder));
+        Metadata parsedMetadata;
+        try (var parser = createParser(builder)) {
+            parsedMetadata = Metadata.Builder.fromXContent(parser);
+        }
 
         // templates
         assertThat(parsedMetadata.templates().get("foo").name(), is("foo"));
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
index e425b0e305050..aa4b4ec6dbbeb 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/AllocationIdTests.java
@@ -142,8 +142,10 @@ public void testSerialization() throws IOException {
             allocationId = AllocationId.newRelocation(allocationId);
         }
         BytesReference bytes = BytesReference.bytes(allocationId.toXContent(XContentFactory.jsonBuilder(), ToXContent.EMPTY_PARAMS));
-        AllocationId parsedAllocationId = AllocationId.fromXContent(createParser(JsonXContent.jsonXContent, bytes));
-        assertEquals(allocationId, parsedAllocationId);
+        try (var parser = createParser(JsonXContent.jsonXContent, bytes)) {
+            AllocationId parsedAllocationId = AllocationId.fromXContent(parser);
+            assertEquals(allocationId, parsedAllocationId);
+        }
     }
 
     public void testEquals() {
diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
index 62fd21defa676..f81d99c55e84e 100644
--- a/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
+++ b/server/src/test/java/org/elasticsearch/cluster/routing/ExpectedShardSizeEstimatorTests.java
@@ -30,33 +30,54 @@
 import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_RESIZE_SOURCE_NAME_KEY;
 import static org.elasticsearch.cluster.metadata.IndexMetadata.INDEX_RESIZE_SOURCE_UUID_KEY;
 import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize;
+import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.shouldReserveSpaceForInitializingShard;
 import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting;
+import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING;
+import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE;
+import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SNAPSHOT_PARTIAL_SETTING;
 import static org.hamcrest.Matchers.equalTo;
 
 public class ExpectedShardSizeEstimatorTests extends ESAllocationTestCase {
 
     private final long defaultValue = randomLongBetween(-1, 0);
 
-    public void testShouldFallbackToDefaultValue() {
+    public void testShouldFallbackToDefaultExpectedShardSize() {
         var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
-        var shard = newShardRouting("my-index", 0, randomIdentifier(), true, ShardRoutingState.INITIALIZING);
+        var shard = newShardRouting(
+            new ShardId("my-index", "_na_", 0),
+            randomIdentifier(),
+            true,
+            ShardRoutingState.INITIALIZING,
+            randomFrom(RecoverySource.EmptyStoreRecoverySource.INSTANCE, RecoverySource.ExistingStoreRecoverySource.INSTANCE)
+        );
 
         var allocation = createRoutingAllocation(state, ClusterInfo.EMPTY, SnapshotShardSizeInfo.EMPTY);
 
         assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(defaultValue));
+        assertFalse(
+            "Should NOT reserve space for locally initializing primaries",
+            shouldReserveSpaceForInitializingShard(shard, allocation)
+        );
     }
 
     public void testShouldReadExpectedSizeFromClusterInfo() {
         var shardSize = randomLongBetween(100, 1000);
         var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
-        var shard = newShardRouting("my-index", 0, randomIdentifier(), true, ShardRoutingState.INITIALIZING);
+        var shard = newShardRouting(
+            new ShardId("my-index", "_na_", 0),
+            randomIdentifier(),
+            true,
+            ShardRoutingState.INITIALIZING,
+            RecoverySource.PeerRecoverySource.INSTANCE
+        );
 
         var clusterInfo = createClusterInfo(shard, shardSize);
         var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
 
         assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(shardSize));
+        assertTrue("Should reserve space for relocating shard", shouldReserveSpaceForInitializingShard(shard, allocation));
     }
 
    public void testShouldReadExpectedSizeFromPrimaryWhenAddingNewReplica() {
@@ -70,21 +91,39 @@ public void testShouldReadExpectedSizeFromPrimaryWhenAddingNewReplica() {
         var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
 
         assertThat(getExpectedShardSize(replica, defaultValue, allocation), equalTo(shardSize));
+        assertTrue("Should reserve space for peer recovery", shouldReserveSpaceForInitializingShard(replica, allocation));
     }
 
     public void testShouldReadExpectedSizeWhenInitializingFromSnapshot() {
         var snapshotShardSize = randomLongBetween(100, 1000);
-        var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index("my-index"))).build();
+
+        var index = switch (randomIntBetween(0, 2)) {
+            // regular snapshot
+            case 0 -> index("my-index");
+            // searchable snapshot
+            case 1 -> index("my-index").settings(
+                indexSettings(IndexVersion.current(), 1, 0) //
+                    .put(INDEX_STORE_TYPE_SETTING.getKey(), SEARCHABLE_SNAPSHOT_STORE_TYPE) //
+            );
+            // partial searchable snapshot
+            case 2 -> index("my-index").settings(
+                indexSettings(IndexVersion.current(), 1, 0) //
+                    .put(INDEX_STORE_TYPE_SETTING.getKey(), SEARCHABLE_SNAPSHOT_STORE_TYPE) //
+                    .put(SNAPSHOT_PARTIAL_SETTING.getKey(), true) //
+            );
+            default -> throw new AssertionError("unexpected index type");
+        };
+        var state = ClusterState.builder(ClusterName.DEFAULT).metadata(metadata(index)).build();
         var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na"));
         var indexId = new IndexId("my-index", "_na_");
         var shard = newShardRouting(
             new ShardId("my-index", "_na_", 0),
-            null,
+            randomIdentifier(),
             true,
-            ShardRoutingState.UNASSIGNED,
+            ShardRoutingState.INITIALIZING,
             new RecoverySource.SnapshotRecoverySource(randomUUID(), snapshot, IndexVersion.current(), indexId)
         );
 
@@ -94,6 +133,14 @@ public void testShouldReadExpectedSizeWhenInitializingFromSnapshot() {
         var allocation = createRoutingAllocation(state, ClusterInfo.EMPTY, snapshotShardSizeInfo);
 
         assertThat(getExpectedShardSize(shard, defaultValue, allocation), equalTo(snapshotShardSize));
+        if (state.metadata().index("my-index").isPartialSearchableSnapshot() == false) {
+            assertTrue("Should reserve space for snapshot restore", shouldReserveSpaceForInitializingShard(shard, allocation));
+        } else {
+            assertFalse(
+                "Should NOT reserve space for partial searchable snapshot restore as they do not download all data during initialization",
+                shouldReserveSpaceForInitializingShard(shard, allocation)
+            );
+        }
     }
 
     public void testShouldReadSizeFromClonedShard() {
@@ -127,6 +174,10 @@ public void testShouldReadSizeFromClonedShard() {
         var allocation = createRoutingAllocation(state, clusterInfo, SnapshotShardSizeInfo.EMPTY);
 
         assertThat(getExpectedShardSize(target, defaultValue, allocation), equalTo(sourceShardSize));
+        assertFalse(
+            "Should NOT reserve space when using fs hardlink for clone/shrink/split",
+            shouldReserveSpaceForInitializingShard(target, state.metadata())
+        );
     }
 
     private static RoutingAllocation createRoutingAllocation(
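The new shouldReserveSpaceForInitializingShard assertions above encode when allocation should reserve disk for an initializing shard. Summarized from the test expectations (the decision logic itself lives in ExpectedShardSizeEstimator; this is a reading of the tests, not the implementation):

    // EmptyStore/ExistingStore primary  -> false (recovers from local disk, nothing is copied)
    // PeerRecoverySource               -> true  (a new replica or relocation copies the whole shard)
    // SnapshotRecoverySource           -> true, unless the index is a partial searchable snapshot
    // LocalShards (clone/shrink/split) -> false (uses filesystem hardlinks, no extra space needed)
    boolean reserve = shouldReserveSpaceForInitializingShard(shard, allocation);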
XContentParser parser = createParser(JsonXContent.jsonXContent, commands); - // move two tokens, parser expected to be "on" `commands` field - parser.nextToken(); - parser.nextToken(); - - assertThat( - AllocationCommands.fromXContent(parser), - equalTo( - new AllocationCommands( - new AllocateEmptyPrimaryAllocationCommand("test", 1, "node1", true), - new AllocateStalePrimaryAllocationCommand("test", 2, "node1", true), - new AllocateReplicaAllocationCommand("test", 2, "node1"), - new MoveAllocationCommand("test", 3, "node2", "node3"), - new CancelAllocationCommand("test", 4, "node5", true) + try (XContentParser parser = createParser(JsonXContent.jsonXContent, commands)) { + // move two tokens, parser expected to be "on" `commands` field + parser.nextToken(); + parser.nextToken(); + + assertThat( + AllocationCommands.fromXContent(parser), + equalTo( + new AllocationCommands( + new AllocateEmptyPrimaryAllocationCommand("test", 1, "node1", true), + new AllocateStalePrimaryAllocationCommand("test", 2, "node1", true), + new AllocateReplicaAllocationCommand("test", 2, "node1"), + new MoveAllocationCommand("test", 3, "node2", "node3"), + new CancelAllocationCommand("test", 4, "node5", true) + ) ) - ) - ); + ); + } } @Override diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java index b4eba769543b8..5e3b6cd02f830 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/allocator/DesiredBalanceComputerTests.java @@ -16,6 +16,7 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.DiskUsage; import org.elasticsearch.cluster.ESAllocationTestCase; +import org.elasticsearch.cluster.RestoreInProgress; import org.elasticsearch.cluster.TestShardRoutingRoleStrategies; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.Metadata; @@ -40,9 +41,14 @@ import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.Maps; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.repositories.IndexId; +import org.elasticsearch.snapshots.InternalSnapshotsInfoService.SnapshotShard; +import org.elasticsearch.snapshots.Snapshot; +import org.elasticsearch.snapshots.SnapshotId; import org.elasticsearch.snapshots.SnapshotShardSizeInfo; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.ThreadPool; @@ -61,6 +67,7 @@ import java.util.function.Function; import static java.util.stream.Collectors.toMap; +import static org.elasticsearch.cluster.ClusterInfo.shardIdentifierFromRouting; import static org.elasticsearch.cluster.routing.ShardRoutingState.INITIALIZING; import static org.elasticsearch.cluster.routing.ShardRoutingState.RELOCATING; import static org.elasticsearch.cluster.routing.ShardRoutingState.STARTED; @@ -70,6 +77,7 @@ import static org.elasticsearch.test.MockLogAppender.assertThatLogger; import static org.hamcrest.Matchers.aMapWithSize; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.equalTo; 
import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.hasEntry; @@ -623,7 +631,7 @@ public void testDesiredBalanceShouldConvergeInABigCluster() { var thisShardSize = smallShardSizeDeviation(shardSize); var primaryNodeId = pickAndRemoveRandomValueFrom(remainingNodeIds); - shardSizes.put(ClusterInfo.shardIdentifierFromRouting(shardId, true), thisShardSize); + shardSizes.put(shardIdentifierFromRouting(shardId, true), thisShardSize); totalShardsSize += thisShardSize; if (primaryNodeId != null) { dataPath.put(new NodeAndShard(primaryNodeId, shardId), "/data"); @@ -642,7 +650,7 @@ public void testDesiredBalanceShouldConvergeInABigCluster() { ); for (int replica = 0; replica < replicas; replica++) { var replicaNodeId = primaryNodeId == null ? null : pickAndRemoveRandomValueFrom(remainingNodeIds); - shardSizes.put(ClusterInfo.shardIdentifierFromRouting(shardId, false), thisShardSize); + shardSizes.put(shardIdentifierFromRouting(shardId, false), thisShardSize); totalShardsSize += thisShardSize; if (replicaNodeId != null) { dataPath.put(new NodeAndShard(replicaNodeId, shardId), "/data"); @@ -862,6 +870,146 @@ private static ClusterInfo createClusterInfo(List<DiskUsage> diskUsages, Map<St + public void testComputeConsideringShardSizes() { + var snapshot = new Snapshot("repository", new SnapshotId("snapshot-1", "na")); + var shardSizeInfo = Maps.<String, Long>newHashMapWithExpectedSize(5); + var snapshotShardSizes = Maps.<SnapshotShard, Long>newHashMapWithExpectedSize(5); + + var routingTableBuilder = RoutingTable.builder(TestShardRoutingRoleStrategies.DEFAULT_ROLE_ONLY); + // index-1 is allocated according to the desired balance + var indexMetadata1 = IndexMetadata.builder("index-1").settings(indexSettings(IndexVersion.current(), 2, 0)).build(); + routingTableBuilder.add( + IndexRoutingTable.builder(indexMetadata1.getIndex()) + .addShard(newShardRouting(shardIdFrom(indexMetadata1, 0), "node-1", true, STARTED)) + .addShard(newShardRouting(shardIdFrom(indexMetadata1, 1), "node-2", true, STARTED)) + ); + shardSizeInfo.put(shardIdentifierFromRouting(shardIdFrom(indexMetadata1, 0), true), ByteSizeValue.ofGb(8).getBytes()); + shardSizeInfo.put(shardIdentifierFromRouting(shardIdFrom(indexMetadata1, 1), true), ByteSizeValue.ofGb(8).getBytes()); + + // index-2 & index-3 are restored as new from snapshot + var indexMetadata2 = IndexMetadata.builder("index-2") + .settings(indexSettings(IndexVersion.current(), 1, 0).put(IndexMetadata.INDEX_PRIORITY_SETTING.getKey(), 2)) + .build(); + routingTableBuilder.addAsNewRestore( + indexMetadata2, + new RecoverySource.SnapshotRecoverySource("restore", snapshot, IndexVersion.current(), indexIdFrom(indexMetadata2)), + Set.of() + ); + snapshotShardSizes.put( + new SnapshotShard(snapshot, indexIdFrom(indexMetadata2), shardIdFrom(indexMetadata2, 0)), + ByteSizeValue.ofGb(1).getBytes() + ); + + var indexMetadata3 = IndexMetadata.builder("index-3") + .settings(indexSettings(IndexVersion.current(), 2, 0).put(IndexMetadata.INDEX_PRIORITY_SETTING.getKey(), 1)) + .build(); + routingTableBuilder.addAsNewRestore( + indexMetadata3, + new RecoverySource.SnapshotRecoverySource("restore", snapshot, IndexVersion.current(), indexIdFrom(indexMetadata3)), + Set.of() + ); + snapshotShardSizes.put( + new SnapshotShard(snapshot, indexIdFrom(indexMetadata3), shardIdFrom(indexMetadata3, 0)), + ByteSizeValue.ofMb(512).getBytes() + ); + snapshotShardSizes.put( + new SnapshotShard(snapshot, indexIdFrom(indexMetadata3), shardIdFrom(indexMetadata3, 1)), + ByteSizeValue.ofMb(512).getBytes() + ); + + var clusterState = ClusterState.builder(ClusterName.DEFAULT) + .nodes(DiscoveryNodes.builder().add(newNode("node-1")).add(newNode("node-2"))) + 
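// each node keeps 2gb free (10gb total, 8gb used by an index-1 shard), which fits either the single 1gb [index-2] shard + // or both 512mb [index-3] shards, so either of the two mirrored assignments asserted below is acceptable + 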
.metadata(Metadata.builder().put(indexMetadata1, false).put(indexMetadata2, false).put(indexMetadata3, false).build()) + .routingTable(routingTableBuilder) + .customs( + Map.of( + RestoreInProgress.TYPE, + new RestoreInProgress.Builder().add( + new RestoreInProgress.Entry( + "restore", + snapshot, + RestoreInProgress.State.STARTED, + randomBoolean(), + List.of(indexMetadata2.getIndex().getName(), indexMetadata3.getIndex().getName()), + Map.ofEntries( + Map.entry(shardIdFrom(indexMetadata2, 0), new RestoreInProgress.ShardRestoreStatus(randomUUID())), + Map.entry(shardIdFrom(indexMetadata3, 0), new RestoreInProgress.ShardRestoreStatus(randomUUID())), + Map.entry(shardIdFrom(indexMetadata3, 1), new RestoreInProgress.ShardRestoreStatus(randomUUID())) + ) + ) + ).build() + ) + ) + .build(); + + var clusterInfo = createClusterInfo( + List.of( + // node-1 has enough space to only allocate the only [index-2] shard + new DiskUsage("node-1", "data-1", "/data", ByteSizeValue.ofGb(10).getBytes(), ByteSizeValue.ofGb(2).getBytes()), + // node-2 has enough space to only allocate both shards of [index-3] + new DiskUsage("node-2", "data-2", "/data", ByteSizeValue.ofGb(10).getBytes(), ByteSizeValue.ofGb(2).getBytes()) + ), + shardSizeInfo + ); + var snapshotShardSizeInfo = new SnapshotShardSizeInfo(snapshotShardSizes); + + var settings = Settings.EMPTY; + var allocation = new RoutingAllocation( + randomAllocationDeciders(settings, createBuiltInClusterSettings(settings)), + clusterState, + clusterInfo, + snapshotShardSizeInfo, + 0L + ); + var initialDesiredBalance = new DesiredBalance( + 1, + Map.ofEntries( + Map.entry(shardIdFrom(indexMetadata1, 0), new ShardAssignment(Set.of("node-1"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata1, 1), new ShardAssignment(Set.of("node-2"), 1, 0, 0)) + ) + ); + var nextDesiredBalance = createDesiredBalanceComputer(new BalancedShardsAllocator()).compute( + initialDesiredBalance, + new DesiredBalanceInput(2, allocation, List.of()), + queue(), + input -> true + ); + + assertThat( + nextDesiredBalance.assignments(), + anyOf( + equalTo( + Map.ofEntries( + Map.entry(shardIdFrom(indexMetadata1, 0), new ShardAssignment(Set.of("node-1"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata1, 1), new ShardAssignment(Set.of("node-2"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata2, 0), new ShardAssignment(Set.of("node-1"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata3, 0), new ShardAssignment(Set.of("node-2"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata3, 1), new ShardAssignment(Set.of("node-2"), 1, 0, 0)) + ) + ), + equalTo( + Map.ofEntries( + Map.entry(shardIdFrom(indexMetadata1, 0), new ShardAssignment(Set.of("node-1"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata1, 1), new ShardAssignment(Set.of("node-2"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata2, 0), new ShardAssignment(Set.of("node-2"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata3, 0), new ShardAssignment(Set.of("node-1"), 1, 0, 0)), + Map.entry(shardIdFrom(indexMetadata3, 1), new ShardAssignment(Set.of("node-1"), 1, 0, 0)) + ) + ) + ) + ); + } + + private static IndexId indexIdFrom(IndexMetadata indexMetadata) { + return new IndexId(indexMetadata.getIndex().getName(), indexMetadata.getIndex().getUUID()); + } + + private static ShardId shardIdFrom(IndexMetadata indexMetadata, int shardId) { + return new ShardId(indexMetadata.getIndex(), shardId); + } + public void testShouldLogComputationIteration() { checkIterationLogging( 999, @@ -943,7 +1091,7 @@ public ShardAllocationDecision 
decideShardAllocation(ShardRouting shard, Routing } private static Map.Entry<String, Long> indexSize(ClusterState clusterState, String name, long size, boolean primary) { - return Map.entry(ClusterInfo.shardIdentifierFromRouting(findShardId(clusterState, name), primary), size); + return Map.entry(shardIdentifierFromRouting(findShardId(clusterState, name), primary), size); } private static ShardId findShardId(ClusterState clusterState, String name) { diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java index b54480cdc0856..74d8bc62ff203 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/decider/DiskThresholdDeciderUnitTests.java @@ -43,15 +43,15 @@ import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; -import java.util.List; import java.util.Map; import static java.util.Collections.emptySet; +import static org.elasticsearch.cluster.ClusterInfo.shardIdentifierFromRouting; import static org.elasticsearch.cluster.routing.ExpectedShardSizeEstimator.getExpectedShardSize; +import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.elasticsearch.index.IndexModule.INDEX_STORE_TYPE_SETTING; import static org.elasticsearch.snapshots.SearchableSnapshotsSettings.SEARCHABLE_SNAPSHOT_STORE_TYPE; import static org.hamcrest.Matchers.containsString; @@ -512,93 +512,64 @@ public void testShardSizeAndRelocatingSize() { } public void testTakesIntoAccountExpectedSizeForInitializingSearchableSnapshots() { - String mainIndexName = "test"; - Index index = new Index(mainIndexName, "1234"); - String anotherIndexName = "another_index"; - Index anotherIndex = new Index(anotherIndexName, "5678"); - Metadata metadata = Metadata.builder() - .put( - IndexMetadata.builder(mainIndexName) - .settings( - settings(IndexVersion.current()).put("index.uuid", "1234") - .put(INDEX_STORE_TYPE_SETTING.getKey(), SEARCHABLE_SNAPSHOT_STORE_TYPE) - ) - .numberOfShards(3) - .numberOfReplicas(1) - ) - .put( - IndexMetadata.builder(anotherIndexName) - .settings(settings(IndexVersion.current()).put("index.uuid", "5678")) - .numberOfShards(1) - .numberOfReplicas(1) - ) + + var searchableSnapshotIndex = IndexMetadata.builder("searchable_snapshot") + .settings(indexSettings(IndexVersion.current(), 3, 0).put(INDEX_STORE_TYPE_SETTING.getKey(), SEARCHABLE_SNAPSHOT_STORE_TYPE)) .build(); + var regularIndex = IndexMetadata.builder("regular_index").settings(indexSettings(IndexVersion.current(), 1, 0)).build(); + String nodeId = "node1"; - String anotherNodeId = "another_node"; - - List<ShardRouting> shards = new ArrayList<>(); - int anotherNodeShardCounter = 0; - int nodeShardCounter = 0; - Map<String, Long> initializingShardSizes = new HashMap<>(); - for (int i = 1; i <= 3; i++) { - int expectedSize = 10 * i; - shards.add(createShard(index, nodeId, nodeShardCounter++, expectedSize)); - if (randomBoolean()) { - ShardRouting initializingShard = ShardRoutingHelper.initialize( - ShardRouting.newUnassigned( - new ShardId(index, nodeShardCounter++), - true, - EmptyStoreRecoverySource.INSTANCE, - new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "foo"), - ShardRouting.Role.DEFAULT - ), - 
nodeId - ); - initializingShardSizes.put(ClusterInfo.shardIdentifierFromRouting(initializingShard), randomLongBetween(10, 50)); - shards.add(initializingShard); - } - // randomly add shards for non-searchable snapshot index - if (randomBoolean()) { - for (int j = 0; j < randomIntBetween(1, 5); j++) { - shards.add(createShard(anotherIndex, anotherNodeId, anotherNodeShardCounter++, expectedSize)); - } - } + var knownShardSizes = new HashMap<String, Long>(); + long unaccountedSearchableSnapshotSizes = 0; + long relocatingShardsSizes = 0; + + var searchableSnapshotIndexRoutingTableBuilder = IndexRoutingTable.builder(searchableSnapshotIndex.getIndex()); + for (int i = 0; i < searchableSnapshotIndex.getNumberOfShards(); i++) { + long expectedSize = randomLongBetween(10, 50); + // a searchable snapshot shard without corresponding entry in cluster info + ShardRouting startedShardWithExpectedSize = newShardRouting( + new ShardId(searchableSnapshotIndex.getIndex(), i), + nodeId, + true, + ShardRoutingState.STARTED, + expectedSize + ); + searchableSnapshotIndexRoutingTableBuilder.addShard(startedShardWithExpectedSize); + unaccountedSearchableSnapshotSizes += expectedSize; + } + var regularIndexRoutingTableBuilder = IndexRoutingTable.builder(regularIndex.getIndex()); + for (int i = 0; i < regularIndex.getNumberOfShards(); i++) { + var shardSize = randomLongBetween(10, 50); + // a shard relocating to this node + ShardRouting initializingShard = newShardRouting( + new ShardId(regularIndex.getIndex(), i), + nodeId, + true, + ShardRoutingState.INITIALIZING, + PeerRecoverySource.INSTANCE + ); + regularIndexRoutingTableBuilder.addShard(initializingShard); + knownShardSizes.put(shardIdentifierFromRouting(initializingShard), shardSize); + relocatingShardsSizes += shardSize; } - DiscoveryNode node = DiscoveryNodeUtils.builder(nodeId).roles(emptySet()).build(); - DiscoveryNode anotherNode = DiscoveryNodeUtils.builder(anotherNodeId).roles(emptySet()).build(); ClusterState clusterState = ClusterState.builder(ClusterName.DEFAULT) - .metadata(metadata) - .routingTable( - RoutingTable.builder() - .add( - shards.stream() - .filter(s -> s.getIndexName().equals(mainIndexName)) - .reduce(IndexRoutingTable.builder(index), IndexRoutingTable.Builder::addShard, (a, b) -> a) - ) - .add( - shards.stream() - .filter(s -> s.getIndexName().equals(anotherIndexName)) - .reduce(IndexRoutingTable.builder(anotherIndex), IndexRoutingTable.Builder::addShard, (a, b) -> a) - ) - .build() - ) - .nodes(DiscoveryNodes.builder().add(node).add(anotherNode).build()) + .metadata(Metadata.builder().put(searchableSnapshotIndex, false).put(regularIndex, false)) + .routingTable(RoutingTable.builder().add(searchableSnapshotIndexRoutingTableBuilder).add(regularIndexRoutingTableBuilder)) + .nodes(DiscoveryNodes.builder().add(newNode(nodeId)).build()) .build(); + RoutingAllocation allocation = new RoutingAllocation( null, clusterState, - new DevNullClusterInfo(Map.of(), Map.of(), initializingShardSizes), + new DevNullClusterInfo(Map.of(), Map.of(), knownShardSizes), null, 0 ); - long sizeOfUnaccountedShards = sizeOfUnaccountedShards( - allocation, - RoutingNodesHelper.routingNode(nodeId, node, shards.toArray(ShardRouting[]::new)), - false, - "/dev/null" + assertEquals( + unaccountedSearchableSnapshotSizes + relocatingShardsSizes, + sizeOfUnaccountedShards(allocation, clusterState.getRoutingNodes().node(nodeId), false, "/dev/null") ); - assertEquals(60L + initializingShardSizes.values().stream().mapToLong(Long::longValue).sum(), sizeOfUnaccountedShards); }
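+ // the expected total combines the two buckets built above: searchable snapshot shards sized only by their expected + // size, and initializing peer-recovery shards sized by the cluster info entries in knownShardSizes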
private ShardRouting createShard(Index index, String nodeId, int i, int expectedSize) { @@ -620,6 +591,7 @@ public long sizeOfUnaccountedShards(RoutingAllocation allocation, RoutingNode no subtractShardsMovingAway, dataPath, allocation.clusterInfo(), + allocation.snapshotShardSizeInfo(), allocation.metadata(), allocation.routingTable(), allocation.unaccountedSearchableSnapshotSize(node) diff --git a/server/src/test/java/org/elasticsearch/common/geo/GeoBoundingBoxTests.java b/server/src/test/java/org/elasticsearch/common/geo/GeoBoundingBoxTests.java index 5e20f954d079d..e7b1404306920 100644 --- a/server/src/test/java/org/elasticsearch/common/geo/GeoBoundingBoxTests.java +++ b/server/src/test/java/org/elasticsearch/common/geo/GeoBoundingBoxTests.java @@ -29,10 +29,11 @@ public class GeoBoundingBoxTests extends ESTestCase { public void testInvalidParseInvalidWKT() throws IOException { XContentBuilder bboxBuilder = XContentFactory.jsonBuilder().startObject().field("wkt", "invalid").endObject(); - XContentParser parser = createParser(bboxBuilder); - parser.nextToken(); - ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> GeoBoundingBox.parseBoundingBox(parser)); - assertThat(e.getMessage(), equalTo("failed to parse WKT bounding box")); + try (XContentParser parser = createParser(bboxBuilder)) { + parser.nextToken(); + ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> GeoBoundingBox.parseBoundingBox(parser)); + assertThat(e.getMessage(), equalTo("failed to parse WKT bounding box")); + } } public void testInvalidParsePoint() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java index 598cc8213e91f..997b076b328d9 100644 --- a/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java +++ b/server/src/test/java/org/elasticsearch/common/logging/HeaderWarningTests.java @@ -318,8 +318,8 @@ public void testAddComplexWarning() { + ".monitoring-beats-mb => [.monitoring-beats-8-*],.monitoring-ent-search-mb => [.monitoring-ent-search-8-*]," + ".monitoring-es-mb => [.monitoring-es-8-*],.monitoring-kibana-mb => [.monitoring-kibana-8-*]," + ".monitoring-logstash-mb => [.monitoring-logstash-8-*],.profiling-ilm-lock => [.profiling-ilm-lock*]," - + ".slm-history => [.slm-history-5*],.watch-history-16 => [.watcher-history-16*]," - + "behavioral_analytics-events-default => [behavioral_analytics-events-*],ilm-history => [ilm-history-5*]," + + ".slm-history => [.slm-history-7*],.watch-history-16 => [.watcher-history-16*]," + + "behavioral_analytics-events-default => [behavioral_analytics-events-*],ilm-history => [ilm-history-7*]," + "logs => [logs-*-*],metrics => [metrics-*-*],profiling-events => [profiling-events*],profiling-executables => " + "[profiling-executables*],profiling-metrics => [profiling-metrics*],profiling-returnpads-private => " + "[.profiling-returnpads-private*],profiling-sq-executables => [.profiling-sq-executables*]," diff --git a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java index 965f305c3c23f..c94ab22352741 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/SettingsTests.java @@ -507,8 +507,10 @@ public void testToAndFromXContent() throws IOException { 
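+ // round-trip: the settings serialized to XContent below are parsed back with Settings.fromXContent and compared entry by entry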
builder.startObject(); settings.toXContent(builder, new ToXContent.MapParams(Collections.singletonMap(Settings.FLAT_SETTINGS_PARAM, "" + flatSettings))); builder.endObject(); - XContentParser parser = createParser(builder); - Settings build = Settings.fromXContent(parser); + Settings build; + try (XContentParser parser = createParser(builder)) { + build = Settings.fromXContent(parser); + } assertEquals(5, build.size()); assertEquals(Arrays.asList("1", "2", "3"), build.getAsList("foo.bar.baz")); assertEquals(2, build.getAsInt("foo.foobar", 0).intValue()); diff --git a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java index 1c4bbda1cd7fb..ee89f2f00ea53 100644 --- a/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java +++ b/server/src/test/java/org/elasticsearch/common/xcontent/XContentParserUtilsTests.java @@ -21,11 +21,15 @@ import org.elasticsearch.xcontent.ObjectParser; import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import org.hamcrest.Matchers; import java.io.IOException; +import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Base64; import java.util.List; @@ -285,4 +289,35 @@ public void testParseTypedKeysObjectErrors() throws IOException { } } } + + public void testParseListWithIndex_IncrementsIndexBy1ForEachEntryInList() throws IOException { + String jsonString = """ + ["a", "b", "c"] + """; + + var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); + + List<String> results; + var indices = new ArrayList<Integer>(); + + try ( + XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON) + .createParser(parserConfig, jsonString.getBytes(StandardCharsets.UTF_8)) + ) { + if (jsonParser.currentToken() == null) { + jsonParser.nextToken(); + } + + results = XContentParserUtils.parseList(jsonParser, (parser, index) -> { + XContentParser.Token token = parser.currentToken(); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_STRING, token, parser); + indices.add(index); + + return parser.text(); + }); + } + + assertThat(results, Matchers.is(List.of("a", "b", "c"))); + assertThat(indices, Matchers.is(List.of(0, 1, 2))); + } } diff --git a/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java b/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java index 4203a984a8f07..c52b0f594ed71 100644 --- a/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java +++ b/server/src/test/java/org/elasticsearch/health/HealthPeriodicLoggerTests.java @@ -21,11 +21,15 @@ import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.ESLogMessage; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.scheduler.SchedulerEngine; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import 
org.elasticsearch.telemetry.TelemetryProvider; +import org.elasticsearch.telemetry.metric.LongGaugeMetric; +import org.elasticsearch.telemetry.metric.MeterRegistry; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.threadpool.TestThreadPool; @@ -40,8 +44,11 @@ import java.util.Set; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; +import java.util.function.BiConsumer; +import java.util.function.Consumer; import static org.elasticsearch.health.HealthStatus.GREEN; +import static org.elasticsearch.health.HealthStatus.RED; import static org.elasticsearch.health.HealthStatus.YELLOW; import static org.elasticsearch.test.ClusterServiceUtils.createClusterService; import static org.hamcrest.Matchers.equalTo; @@ -50,6 +57,7 @@ import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.isNull; import static org.mockito.Mockito.doAnswer; +import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; @@ -76,6 +84,14 @@ private HealthService getMockedHealthService() { return mock(HealthService.class); } + private MeterRegistry getMockedMeterRegistry() { + return mock(MeterRegistry.class); + } + + private TelemetryProvider getMockedTelemetryProvider() { + return mock(TelemetryProvider.class); + } + @Before public void setupServices() { threadPool = new TestThreadPool(getTestName()); @@ -104,8 +120,8 @@ public void testConvertToLoggedFields() { assertThat(loggerResults.size(), equalTo(results.size() + 2)); // test indicator status - assertThat(loggerResults.get(makeHealthStatusString("network_latency")), equalTo("green")); - assertThat(loggerResults.get(makeHealthStatusString("slow_task_assignment")), equalTo("yellow")); + assertThat(loggerResults.get(makeHealthStatusString("master_is_stable")), equalTo("green")); + assertThat(loggerResults.get(makeHealthStatusString("disk")), equalTo("yellow")); assertThat(loggerResults.get(makeHealthStatusString("shards_availability")), equalTo("yellow")); // test calculated overall status @@ -114,7 +130,7 @@ public void testConvertToLoggedFields() { // test calculated message assertThat( loggerResults.get(HealthPeriodicLogger.MESSAGE_FIELD), - equalTo(String.format(Locale.ROOT, "health=%s [shards_availability,slow_task_assignment]", overallStatus.xContentValue())) + equalTo(String.format(Locale.ROOT, "health=%s [disk,shards_availability]", overallStatus.xContentValue())) ); // test empty results @@ -394,18 +410,18 @@ public void testLoggingHappens() { ); mockAppender.addExpectation( new MockLogAppender.SeenEventExpectation( - "network_latency", + "master_is_stable", HealthPeriodicLogger.class.getCanonicalName(), Level.INFO, - String.format(Locale.ROOT, "%s=\"green\"", makeHealthStatusString("network_latency")) + String.format(Locale.ROOT, "%s=\"green\"", makeHealthStatusString("master_is_stable")) ) ); mockAppender.addExpectation( new MockLogAppender.SeenEventExpectation( - "slow_task_assignment", + "disk", HealthPeriodicLogger.class.getCanonicalName(), Level.INFO, - String.format(Locale.ROOT, "%s=\"yellow\"", makeHealthStatusString("slow_task_assignment")) + String.format(Locale.ROOT, "%s=\"yellow\"", makeHealthStatusString("disk")) ) ); mockAppender.addExpectation( @@ -423,6 +439,74 @@ public void testLoggingHappens() { testHealthPeriodicLogger = createAndInitHealthPeriodicLogger(this.clusterService, 
testHealthService, true); + + // switch to Log only mode + this.clusterSettings.applySettings( + Settings.builder() + .put(HealthPeriodicLogger.OUTPUT_MODE_SETTING.getKey(), HealthPeriodicLogger.OutputMode.LOGS) + .put(HealthPeriodicLogger.ENABLED_SETTING.getKey(), true) + .build() + ); + + HealthPeriodicLogger spyHealthPeriodicLogger = spy(testHealthPeriodicLogger); + spyHealthPeriodicLogger.isHealthNode = true; + doAnswer(invocation -> { + spyHealthPeriodicLogger.resultsListener.onResponse(getTestIndicatorResults()); + return null; + }).when(spyHealthPeriodicLogger).tryToLogHealth(); + + SchedulerEngine.Event event = new SchedulerEngine.Event(HealthPeriodicLogger.HEALTH_PERIODIC_LOGGER_JOB_NAME, 0, 0); + spyHealthPeriodicLogger.triggered(event); + + try { + mockAppender.assertAllExpectationsMatched(); + } finally { + Loggers.removeAppender(periodicLoggerLogger, mockAppender); + mockAppender.stop(); + } + } + + public void testOutputModeNoLogging() { + MockLogAppender mockAppender = new MockLogAppender(); + mockAppender.start(); + mockAppender.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "overall", + HealthPeriodicLogger.class.getCanonicalName(), + Level.INFO, + String.format(Locale.ROOT, "%s=\"yellow\"", makeHealthStatusString("overall")) + ) + ); + mockAppender.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "master_is_stable", + HealthPeriodicLogger.class.getCanonicalName(), + Level.INFO, + String.format(Locale.ROOT, "%s=\"green\"", makeHealthStatusString("master_is_stable")) + ) + ); + mockAppender.addExpectation( + new MockLogAppender.UnseenEventExpectation( + "disk", + HealthPeriodicLogger.class.getCanonicalName(), + Level.INFO, + String.format(Locale.ROOT, "%s=\"yellow\"", makeHealthStatusString("disk")) + ) + ); + Logger periodicLoggerLogger = LogManager.getLogger(HealthPeriodicLogger.class); + Loggers.addAppender(periodicLoggerLogger, mockAppender); + + HealthService testHealthService = this.getMockedHealthService(); + + testHealthPeriodicLogger = createAndInitHealthPeriodicLogger(this.clusterService, testHealthService, true); + + // switch to Metrics only mode + this.clusterSettings.applySettings( + Settings.builder() + .put(HealthPeriodicLogger.OUTPUT_MODE_SETTING.getKey(), HealthPeriodicLogger.OutputMode.METRICS) + .put(HealthPeriodicLogger.ENABLED_SETTING.getKey(), true) + .build() + ); + HealthPeriodicLogger spyHealthPeriodicLogger = spy(testHealthPeriodicLogger); spyHealthPeriodicLogger.isHealthNode = true; doAnswer(invocation -> { @@ -439,25 +523,68 @@ public void testLoggingHappens() { Loggers.removeAppender(periodicLoggerLogger, mockAppender); mockAppender.stop(); } + } + + public void testMetricsMode() { + List<String> logs = new ArrayList<>(); + List<Long> metrics = new ArrayList<>(); + BiConsumer<LongGaugeMetric, Long> metricWriter = (metric, value) -> metrics.add(value); + Consumer<ESLogMessage> logWriter = msg -> logs.add(msg.asString()); + + HealthService testHealthService = this.getMockedHealthService(); + testHealthPeriodicLogger = createAndInitHealthPeriodicLogger(this.clusterService, testHealthService, true, metricWriter, logWriter); + + // switch to Metrics only mode + this.clusterSettings.applySettings( + Settings.builder() + .put(HealthPeriodicLogger.OUTPUT_MODE_SETTING.getKey(), HealthPeriodicLogger.OutputMode.METRICS) + .put(HealthPeriodicLogger.ENABLED_SETTING.getKey(), true) + .build() + ); + + HealthPeriodicLogger spyHealthPeriodicLogger = spy(testHealthPeriodicLogger); + spyHealthPeriodicLogger.isHealthNode = true; + List<HealthIndicatorResult> results = getTestIndicatorResultsWithRed(); + + 
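// the stub below short-circuits tryToLogHealth so the triggered job hands the RED results straight to the logger's listener + 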
doAnswer(invocation -> { + spyHealthPeriodicLogger.resultsListener.onResponse(results); + return null; + }).when(spyHealthPeriodicLogger).tryToLogHealth(); + + assertEquals(0, metrics.size()); + + SchedulerEngine.Event event = new SchedulerEngine.Event(HealthPeriodicLogger.HEALTH_PERIODIC_LOGGER_JOB_NAME, 0, 0); + spyHealthPeriodicLogger.triggered(event); + + assertEquals(0, logs.size()); + assertEquals(4, metrics.size()); } private List<HealthIndicatorResult> getTestIndicatorResults() { - var networkLatency = new HealthIndicatorResult("network_latency", GREEN, null, null, null, null); - var slowTasks = new HealthIndicatorResult("slow_task_assignment", YELLOW, null, null, null, null); + var networkLatency = new HealthIndicatorResult("master_is_stable", GREEN, null, null, null, null); + var slowTasks = new HealthIndicatorResult("disk", YELLOW, null, null, null, null); var shardsAvailable = new HealthIndicatorResult("shards_availability", YELLOW, null, null, null, null); return List.of(networkLatency, slowTasks, shardsAvailable); } private List<HealthIndicatorResult> getTestIndicatorResultsAllGreen() { - var networkLatency = new HealthIndicatorResult("network_latency", GREEN, null, null, null, null); - var slowTasks = new HealthIndicatorResult("slow_task_assignment", GREEN, null, null, null, null); + var networkLatency = new HealthIndicatorResult("master_is_stable", GREEN, null, null, null, null); + var slowTasks = new HealthIndicatorResult("disk", GREEN, null, null, null, null); var shardsAvailable = new HealthIndicatorResult("shards_availability", GREEN, null, null, null, null); return List.of(networkLatency, slowTasks, shardsAvailable); } + private List<HealthIndicatorResult> getTestIndicatorResultsWithRed() { + var networkLatency = new HealthIndicatorResult("master_is_stable", GREEN, null, null, null, null); + var slowTasks = new HealthIndicatorResult("disk", GREEN, null, null, null, null); + var shardsAvailable = new HealthIndicatorResult("shards_availability", RED, null, null, null, null); + + return List.of(networkLatency, slowTasks, shardsAvailable); + } + private String makeHealthStatusString(String key) { return String.format(Locale.ROOT, "%s.%s.status", HealthPeriodicLogger.HEALTH_FIELD_PREFIX, key); } @@ -467,10 +594,42 @@ private HealthPeriodicLogger createAndInitHealthPeriodicLogger( HealthService testHealthService, boolean enabled ) { - testHealthPeriodicLogger = HealthPeriodicLogger.create(Settings.EMPTY, clusterService, this.client, testHealthService); + return createAndInitHealthPeriodicLogger(clusterService, testHealthService, enabled, null, null); + } + + private HealthPeriodicLogger createAndInitHealthPeriodicLogger( + ClusterService clusterService, + HealthService testHealthService, + boolean enabled, + BiConsumer<LongGaugeMetric, Long> metricWriter, + Consumer<ESLogMessage> logWriter + ) { + var provider = getMockedTelemetryProvider(); + var registry = getMockedMeterRegistry(); + doReturn(registry).when(provider).getMeterRegistry(); + if (metricWriter != null || logWriter != null) { + testHealthPeriodicLogger = HealthPeriodicLogger.create( + Settings.EMPTY, + clusterService, + this.client, + testHealthService, + provider, + metricWriter, + logWriter + ); + } else { + testHealthPeriodicLogger = HealthPeriodicLogger.create( + Settings.EMPTY, + clusterService, + this.client, + testHealthService, + provider + ); + } if (enabled) { clusterSettings.applySettings(Settings.builder().put(HealthPeriodicLogger.ENABLED_SETTING.getKey(), true).build()); } + return testHealthPeriodicLogger; } } diff --git a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java 
b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java index 409023afc4576..2a2986d974b0d 100644 --- a/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java +++ b/server/src/test/java/org/elasticsearch/http/DefaultRestChannelTests.java @@ -495,7 +495,7 @@ public void testTraceStopped() { executeRequest(Settings.EMPTY, "request-host"); - verify(tracer).setAttribute(argThat(id -> id.getRawId().startsWith("rest-")), eq("http.status_code"), eq(200L)); + verify(tracer).setAttribute(argThat(id -> id.getSpanId().startsWith("rest-")), eq("http.status_code"), eq(200L)); verify(tracer).stopTrace(any(RestRequest.class)); } diff --git a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java index e2a2c72d3eae3..d3ad4dd8586d5 100644 --- a/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java +++ b/server/src/test/java/org/elasticsearch/index/codec/PerFieldMapperCodecTests.java @@ -15,11 +15,14 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.MapperTestUtils; +import org.elasticsearch.index.codec.bloomfilter.ES87BloomFilterPostingsFormat; +import org.elasticsearch.index.codec.postings.ES812PostingsFormat; import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.test.ESTestCase; import java.io.IOException; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; public class PerFieldMapperCodecTests extends ESTestCase { @@ -27,23 +30,29 @@ public class PerFieldMapperCodecTests extends ESTestCase { public void testUseBloomFilter() throws IOException { PerFieldMapperCodec perFieldMapperCodec = createCodec(false, randomBoolean(), false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("another_field"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled() throws IOException { PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(true)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES87BloomFilterPostingsFormat.class)); assertThat(perFieldMapperCodec.useBloomFilter("another_field"), is(false)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("another_field"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_noTimeSeriesMode() throws IOException { PerFieldMapperCodec perFieldMapperCodec = createCodec(true, false, false); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); } public void testUseBloomFilterWithTimestampFieldEnabled_disableBloomFilter() throws IOException { PerFieldMapperCodec perFieldMapperCodec = createCodec(true, true, true); assertThat(perFieldMapperCodec.useBloomFilter("_id"), is(false)); + assertThat(perFieldMapperCodec.getPostingsFormatForField("_id"), instanceOf(ES812PostingsFormat.class)); assertWarnings( "[index.bloom_filter_for_id_field.enabled] setting was deprecated in Elasticsearch and 
will be removed in a future release." ); diff --git a/server/src/test/java/org/elasticsearch/index/codec/postings/ES812PostingsFormatTests.java b/server/src/test/java/org/elasticsearch/index/codec/postings/ES812PostingsFormatTests.java new file mode 100644 index 0000000000000..b11ab47102288 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/postings/ES812PostingsFormatTests.java @@ -0,0 +1,138 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import org.apache.lucene.codecs.Codec; +import org.apache.lucene.codecs.CompetitiveImpactAccumulator; +import org.apache.lucene.codecs.lucene90.blocktree.FieldReader; +import org.apache.lucene.codecs.lucene90.blocktree.Stats; +import org.apache.lucene.document.Document; +import org.apache.lucene.document.Field; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.Impact; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.store.ByteArrayDataInput; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.tests.analysis.MockAnalyzer; +import org.apache.lucene.tests.index.BasePostingsFormatTestCase; +import org.apache.lucene.tests.util.TestUtil; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; + +public class ES812PostingsFormatTests extends BasePostingsFormatTestCase { + private final Codec codec = TestUtil.alwaysPostingsFormat(new ES812PostingsFormat()); + + @Override + protected Codec getCodec() { + return codec; + } + + /** Make sure the final sub-block(s) are not skipped. 
*/ + public void testFinalBlock() throws Exception { + Directory d = newDirectory(); + IndexWriter w = new IndexWriter(d, new IndexWriterConfig(new MockAnalyzer(random()))); + for (int i = 0; i < 25; i++) { + Document doc = new Document(); + doc.add(newStringField("field", Character.toString((char) (97 + i)), Field.Store.NO)); + doc.add(newStringField("field", "z" + Character.toString((char) (97 + i)), Field.Store.NO)); + w.addDocument(doc); + } + w.forceMerge(1); + + DirectoryReader r = DirectoryReader.open(w); + assertEquals(1, r.leaves().size()); + FieldReader field = (FieldReader) r.leaves().get(0).reader().terms("field"); + // We should see exactly two blocks: one root block (prefix empty string) and one block for z* + // terms (prefix z): + Stats stats = field.getStats(); + assertEquals(0, stats.floorBlockCount); + assertEquals(2, stats.nonFloorBlockCount); + r.close(); + w.close(); + d.close(); + } + + public void testImpactSerialization() throws IOException { + // omit norms and omit freqs + doTestImpactSerialization(Collections.singletonList(new Impact(1, 1L))); + + // omit freqs + doTestImpactSerialization(Collections.singletonList(new Impact(1, 42L))); + // omit freqs with very large norms + doTestImpactSerialization(Collections.singletonList(new Impact(1, -100L))); + + // omit norms + doTestImpactSerialization(Collections.singletonList(new Impact(30, 1L))); + // omit norms with large freq + doTestImpactSerialization(Collections.singletonList(new Impact(500, 1L))); + + // freqs and norms, basic + doTestImpactSerialization( + Arrays.asList( + new Impact(1, 7L), + new Impact(3, 9L), + new Impact(7, 10L), + new Impact(15, 11L), + new Impact(20, 13L), + new Impact(28, 14L) + ) + ); + + // freqs and norms, high values + doTestImpactSerialization( + Arrays.asList( + new Impact(2, 2L), + new Impact(10, 10L), + new Impact(12, 50L), + new Impact(50, -100L), + new Impact(1000, -80L), + new Impact(1005, -3L) + ) + ); + } + + private void doTestImpactSerialization(List<Impact> impacts) throws IOException { + CompetitiveImpactAccumulator acc = new CompetitiveImpactAccumulator(); + for (Impact impact : impacts) { + acc.add(impact.freq, impact.norm); + } + try (Directory dir = newDirectory()) { + try (IndexOutput out = dir.createOutput("foo", IOContext.DEFAULT)) { + ES812SkipWriter.writeImpacts(acc, out); + } + try (IndexInput in = dir.openInput("foo", IOContext.DEFAULT)) { + byte[] b = new byte[Math.toIntExact(in.length())]; + in.readBytes(b, 0, b.length); + List<Impact> impacts2 = ES812ScoreSkipReader.readImpacts( + new ByteArrayDataInput(b), + new ES812ScoreSkipReader.MutableImpactList() + ); + assertEquals(impacts, impacts2); + } + } + } +} diff --git a/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java b/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java new file mode 100644 index 0000000000000..14e8d3344c3dc --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/codec/postings/ForUtilTests.java @@ -0,0 +1,99 @@ +/* + * @notice + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Modifications copyright (C) 2022 Elasticsearch B.V. + */ +package org.elasticsearch.index.codec.postings; + +import com.carrotsearch.randomizedtesting.generators.RandomNumbers; + +import org.apache.lucene.store.ByteBuffersDirectory; +import org.apache.lucene.store.Directory; +import org.apache.lucene.store.IOContext; +import org.apache.lucene.store.IndexInput; +import org.apache.lucene.store.IndexOutput; +import org.apache.lucene.tests.util.LuceneTestCase; +import org.apache.lucene.tests.util.TestUtil; +import org.apache.lucene.util.ArrayUtil; +import org.apache.lucene.util.packed.PackedInts; + +import java.io.IOException; +import java.util.Arrays; + +public class ForUtilTests extends LuceneTestCase { + + public void testEncodeDecode() throws IOException { + final int iterations = RandomNumbers.randomIntBetween(random(), 50, 1000); + final int[] values = new int[iterations * ForUtil.BLOCK_SIZE]; + + for (int i = 0; i < iterations; ++i) { + final int bpv = TestUtil.nextInt(random(), 1, 31); + for (int j = 0; j < ForUtil.BLOCK_SIZE; ++j) { + values[i * ForUtil.BLOCK_SIZE + j] = RandomNumbers.randomIntBetween(random(), 0, (int) PackedInts.maxValue(bpv)); + } + } + + final Directory d = new ByteBuffersDirectory(); + final long endPointer; + + { + // encode + IndexOutput out = d.createOutput("test.bin", IOContext.DEFAULT); + final ForUtil forUtil = new ForUtil(); + + for (int i = 0; i < iterations; ++i) { + long[] source = new long[ForUtil.BLOCK_SIZE]; + long or = 0; + for (int j = 0; j < ForUtil.BLOCK_SIZE; ++j) { + source[j] = values[i * ForUtil.BLOCK_SIZE + j]; + or |= source[j]; + } + final int bpv = PackedInts.bitsRequired(or); + out.writeByte((byte) bpv); + forUtil.encode(source, bpv, out); + } + endPointer = out.getFilePointer(); + out.close(); + } + + { + // decode + IndexInput in = d.openInput("test.bin", IOContext.READONCE); + final ForUtil forUtil = new ForUtil(); + for (int i = 0; i < iterations; ++i) { + final int bitsPerValue = in.readByte(); + final long currentFilePointer = in.getFilePointer(); + final long[] restored = new long[ForUtil.BLOCK_SIZE]; + forUtil.decode(bitsPerValue, in, restored); + int[] ints = new int[ForUtil.BLOCK_SIZE]; + for (int j = 0; j < ForUtil.BLOCK_SIZE; ++j) { + ints[j] = Math.toIntExact(restored[j]); + } + assertArrayEquals( + Arrays.toString(ints), + ArrayUtil.copyOfSubArray(values, i * ForUtil.BLOCK_SIZE, (i + 1) * ForUtil.BLOCK_SIZE), + ints + ); + assertEquals(forUtil.numBytes(bitsPerValue), in.getFilePointer() - currentFilePointer); + } + assertEquals(endPointer, in.getFilePointer()); + in.close(); + } + + d.close(); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java index 2c1daa09340d7..3185769bdab82 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTestUtils.java @@ -61,11 +61,11 @@ public static void pruneTombstones(LiveVersionMap map, long maxTimestampToPrune, 
map.pruneTombstones(maxTimestampToPrune, maxSeqNoToPrune); } - static IndexVersionValue randomIndexVersionValue() { + public static IndexVersionValue randomIndexVersionValue() { return new IndexVersionValue(randomTranslogLocation(), randomNonNegativeLong(), randomNonNegativeLong(), randomNonNegativeLong()); } - static Translog.Location randomTranslogLocation() { + public static Translog.Location randomTranslogLocation() { if (randomBoolean()) { return null; } else { @@ -93,6 +93,10 @@ public static boolean isSafeAccessRequired(LiveVersionMap map) { return map.isSafeAccessRequired(); } + public static void enforceSafeAccess(LiveVersionMap map) { + map.enforceSafeAccess(); + } + public static LiveVersionMapArchive getArchive(LiveVersionMap map) { return map.getArchive(); } diff --git a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java index a9b42ccdef248..5ca7aadc35fa7 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/LiveVersionMapTests.java @@ -13,7 +13,9 @@ import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.BytesRefBuilder; import org.apache.lucene.util.Constants; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.Releasable; +import org.elasticsearch.core.Tuple; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; @@ -23,11 +25,16 @@ import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; +import java.util.function.Supplier; +import java.util.stream.IntStream; +import static org.elasticsearch.common.util.concurrent.ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency; +import static org.elasticsearch.core.Tuple.tuple; import static org.elasticsearch.index.engine.LiveVersionMapTestUtils.randomIndexVersionValue; import static org.elasticsearch.index.engine.LiveVersionMapTestUtils.randomTranslogLocation; import static org.hamcrest.Matchers.empty; @@ -36,7 +43,6 @@ import static org.hamcrest.Matchers.nullValue; public class LiveVersionMapTests extends ESTestCase { - public void testRamBytesUsed() throws Exception { LiveVersionMap map = new LiveVersionMap(); for (int i = 0; i < 100000; ++i) { @@ -442,4 +448,51 @@ public void testRandomlyIndexDeleteAndRefresh() throws Exception { } } } + + public void testVersionLookupRamBytesUsed() { + var vl = new LiveVersionMap.VersionLookup(newConcurrentMapWithAggressiveConcurrency()); + assertEquals(0, vl.ramBytesUsed()); + Set<BytesRef> existingKeys = new HashSet<>(); + Supplier<Tuple<BytesRef, IndexVersionValue>> randomEntry = () -> { + var key = randomBoolean() || existingKeys.isEmpty() ? 
uid(randomIdentifier()) : randomFrom(existingKeys); + return tuple(key, randomIndexVersionValue()); + }; + IntStream.range(0, randomIntBetween(10, 100)).forEach(i -> { + switch (randomIntBetween(0, 2)) { + case 0: // put + var entry = randomEntry.get(); + var previousValue = vl.put(entry.v1(), entry.v2()); + if (existingKeys.contains(entry.v1())) { + assertNotNull(previousValue); + } else { + assertNull(previousValue); + existingKeys.add(entry.v1()); + } + break; + case 1: // remove + if (existingKeys.isEmpty() == false) { + var key = randomFrom(existingKeys); + assertNotNull(vl.remove(key)); + existingKeys.remove(key); + } + break; + case 2: // merge + var toMerge = new LiveVersionMap.VersionLookup(ConcurrentCollections.newConcurrentMapWithAggressiveConcurrency()); + IntStream.range(0, randomIntBetween(1, 100)) + .mapToObj(n -> randomEntry.get()) + .forEach(kv -> toMerge.put(kv.v1(), kv.v2())); + vl.merge(toMerge); + existingKeys.addAll(toMerge.getMap().keySet()); + break; + default: + throw new IllegalStateException("branch value unexpected"); + } + }); + long actualRamBytesUsed = vl.getMap() + .entrySet() + .stream() + .mapToLong(entry -> LiveVersionMap.VersionLookup.mapEntryBytesUsed(entry.getKey(), entry.getValue())) + .sum(); + assertEquals(actualRamBytesUsed, vl.ramBytesUsed()); + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java index d8f063ece35c0..053e4226b3d79 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/BooleanScriptFieldTypeTests.java @@ -347,65 +347,66 @@ public void testDualingQueries() throws IOException { try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { List<Boolean> values = randomList(0, 2, ESTestCase::randomBoolean); String source = "{\"foo\": " + values + "}"; - XContentParser parser = createParser(JsonXContent.jsonXContent, source); - SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); - DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.EMPTY, sourceToParse) { - @Override - public XContentParser parser() { - return parser; + try (XContentParser parser = createParser(JsonXContent.jsonXContent, source)) { + SourceToParse sourceToParse = new SourceToParse("test", new BytesArray(source), XContentType.JSON); + DocumentParserContext ctx = new TestDocumentParserContext(MappingLookup.EMPTY, sourceToParse) { + @Override + public XContentParser parser() { + return parser; + } + }; + ctx.doc().add(new StoredField("_source", new BytesRef(source))); + + ctx.parser().nextToken(); + ctx.parser().nextToken(); + ctx.parser().nextToken(); + while (ctx.parser().nextToken() != Token.END_ARRAY) { + ootb.parse(ctx); } - }; - ctx.doc().add(new StoredField("_source", new BytesRef(source))); - - ctx.parser().nextToken(); - ctx.parser().nextToken(); - ctx.parser().nextToken(); - while (ctx.parser().nextToken() != Token.END_ARRAY) { - ootb.parse(ctx); - } - iw.addDocument(ctx.doc()); - try (DirectoryReader reader = iw.getReader()) { - IndexSearcher searcher = newSearcher(reader); - assertSameCount( - searcher, - source, - "*", - simpleMappedFieldType().existsQuery(mockContext()), - ootb.fieldType().existsQuery(mockContext()) - ); - boolean term = randomBoolean(); - assertSameCount( - searcher, - source, - term, - 
simpleMappedFieldType().termQuery(term, mockContext()), - ootb.fieldType().termQuery(term, mockContext()) - ); - List<Boolean> terms = randomList(0, 3, ESTestCase::randomBoolean); - assertSameCount( - searcher, - source, - terms, - simpleMappedFieldType().termsQuery(terms, mockContext()), - ootb.fieldType().termsQuery(terms, mockContext()) - ); - boolean low; - boolean high; - if (randomBoolean()) { - low = high = randomBoolean(); - } else { - low = false; - high = true; + iw.addDocument(ctx.doc()); + try (DirectoryReader reader = iw.getReader()) { + IndexSearcher searcher = newSearcher(reader); + assertSameCount( + searcher, + source, + "*", + simpleMappedFieldType().existsQuery(mockContext()), + ootb.fieldType().existsQuery(mockContext()) + ); + boolean term = randomBoolean(); + assertSameCount( + searcher, + source, + term, + simpleMappedFieldType().termQuery(term, mockContext()), + ootb.fieldType().termQuery(term, mockContext()) + ); + List<Boolean> terms = randomList(0, 3, ESTestCase::randomBoolean); + assertSameCount( + searcher, + source, + terms, + simpleMappedFieldType().termsQuery(terms, mockContext()), + ootb.fieldType().termsQuery(terms, mockContext()) + ); + boolean low; + boolean high; + if (randomBoolean()) { + low = high = randomBoolean(); + } else { + low = false; + high = true; + } + boolean includeLow = randomBoolean(); + boolean includeHigh = randomBoolean(); + assertSameCount( + searcher, + source, + (includeLow ? "[" : "(") + low + "," + high + (includeHigh ? "]" : ")"), + simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()), + ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()) + ); } - boolean includeLow = randomBoolean(); - boolean includeHigh = randomBoolean(); - assertSameCount( - searcher, - source, - (includeLow ? "[" : "(") + low + "," + high + (includeHigh ? 
"]" : ")"), - simpleMappedFieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()), - ootb.fieldType().rangeQuery(low, high, includeLow, includeHigh, null, null, null, mockContext()) - ); } } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java index 1f473d0ade35b..229d16ab85aef 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/CompletionFieldMapperTests.java @@ -223,7 +223,10 @@ public void testTypeParsing() throws Exception { XContentBuilder builder = jsonBuilder().startObject(); fieldMapper.toXContent(builder, new ToXContent.MapParams(Map.of("include_defaults", "true"))).endObject(); builder.close(); - Map serializedMap = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder)).map(); + Map serializedMap; + try (var parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) { + serializedMap = parser.map(); + } Map configMap = (Map) serializedMap.get("field"); assertThat(configMap.get("analyzer").toString(), is("simple")); assertThat(configMap.get("search_analyzer").toString(), is("standard")); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java index 079a79938c310..c1fd872e89f45 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DocCountFieldMapperTests.java @@ -104,8 +104,10 @@ public void testSyntheticSourceMany() throws IOException { LeafStoredFieldLoader storedFieldLoader = StoredFieldLoader.empty().getLoader(leaf, docIds); for (int docId : docIds) { String source = sourceLoaderLeaf.source(storedFieldLoader, docId).internalSourceRef().utf8ToString(); - int doc = (int) JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, source).map().get("doc"); - assertThat("doc " + docId, source, equalTo("{\"_doc_count\":" + counts.get(doc) + ",\"doc\":" + doc + "}")); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, source)) { + int doc = (int) parser.map().get("doc"); + assertThat("doc " + docId, source, equalTo("{\"_doc_count\":" + counts.get(doc) + ",\"doc\":" + doc + "}")); + } } } }); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java index c55ffaaa70a16..f089648bce27f 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java @@ -22,24 +22,32 @@ public class DotExpandingXContentParserTests extends ESTestCase { private void assertXContentMatches(String dotsExpanded, String withDots) throws IOException { - XContentParser inputParser = createParser(JsonXContent.jsonXContent, withDots); final ContentPath contentPath = new ContentPath(); - XContentParser expandedParser = DotExpandingXContentParser.expandDots(inputParser, contentPath); - expandedParser.allowDuplicateKeys(true); - - XContentBuilder actualOutput = XContentBuilder.builder(JsonXContent.jsonXContent).copyCurrentStructure(expandedParser); - assertEquals(dotsExpanded, 
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java
index c55ffaaa70a16..f089648bce27f 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/DotExpandingXContentParserTests.java
@@ -22,24 +22,32 @@ public class DotExpandingXContentParserTests extends ESTestCase {
 
     private void assertXContentMatches(String dotsExpanded, String withDots) throws IOException {
-        XContentParser inputParser = createParser(JsonXContent.jsonXContent, withDots);
         final ContentPath contentPath = new ContentPath();
-        XContentParser expandedParser = DotExpandingXContentParser.expandDots(inputParser, contentPath);
-        expandedParser.allowDuplicateKeys(true);
-
-        XContentBuilder actualOutput = XContentBuilder.builder(JsonXContent.jsonXContent).copyCurrentStructure(expandedParser);
-        assertEquals(dotsExpanded, Strings.toString(actualOutput));
-
-        XContentParser expectedParser = createParser(JsonXContent.jsonXContent, dotsExpanded);
-        expectedParser.allowDuplicateKeys(true);
-        XContentParser actualParser = DotExpandingXContentParser.expandDots(createParser(JsonXContent.jsonXContent, withDots), contentPath);
-        XContentParser.Token currentToken;
-        while ((currentToken = actualParser.nextToken()) != null) {
-            assertEquals(currentToken, expectedParser.nextToken());
-            assertEquals(expectedParser.currentToken(), actualParser.currentToken());
-            assertEquals(actualParser.currentToken().name(), expectedParser.currentName(), actualParser.currentName());
+        try (
+            XContentParser inputParser = createParser(JsonXContent.jsonXContent, withDots);
+            XContentParser expandedParser = DotExpandingXContentParser.expandDots(inputParser, contentPath)
+        ) {
+            expandedParser.allowDuplicateKeys(true);
+
+            XContentBuilder actualOutput = XContentBuilder.builder(JsonXContent.jsonXContent).copyCurrentStructure(expandedParser);
+            assertEquals(dotsExpanded, Strings.toString(actualOutput));
+
+            try (XContentParser expectedParser = createParser(JsonXContent.jsonXContent, dotsExpanded)) {
+                expectedParser.allowDuplicateKeys(true);
+                try (
+                    var p = createParser(JsonXContent.jsonXContent, withDots);
+                    XContentParser actualParser = DotExpandingXContentParser.expandDots(p, contentPath)
+                ) {
+                    XContentParser.Token currentToken;
+                    while ((currentToken = actualParser.nextToken()) != null) {
+                        assertEquals(currentToken, expectedParser.nextToken());
+                        assertEquals(expectedParser.currentToken(), actualParser.currentToken());
+                        assertEquals(actualParser.currentToken().name(), expectedParser.currentName(), actualParser.currentName());
+                    }
+                    assertNull(expectedParser.nextToken());
+                }
+            }
         }
-        assertNull(expectedParser.nextToken());
     }
 
     public void testEmbeddedObject() throws IOException {
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java
index aa4dec379f085..8627a236d6401 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/GeoPointFieldTypeTests.java
@@ -11,15 +11,19 @@
 import org.apache.lucene.tests.geo.GeoTestUtil;
 import org.elasticsearch.common.geo.GeoPoint;
 import org.elasticsearch.common.geo.SimpleFeatureFactory;
+import org.elasticsearch.geometry.Point;
+import org.elasticsearch.geometry.utils.WellKnownBinary;
 import org.elasticsearch.index.IndexVersion;
 import org.elasticsearch.script.ScriptCompiler;
-import org.hamcrest.Matchers;
 
 import java.io.IOException;
+import java.nio.ByteOrder;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
 
+import static org.hamcrest.Matchers.equalTo;
+
 public class GeoPointFieldTypeTests extends FieldTypeTestCase {
 
     public void testFetchSourceValue() throws IOException {
@@ -36,31 +40,50 @@ public void testFetchSourceValue() throws IOException {
         Map<String, Object> otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0));
         String wktPoint = "POINT (42.0 27.1)";
         String otherWktPoint = "POINT (30.0 50.0)";
+        byte[] wkbPoint = WellKnownBinary.toWKB(new Point(42.0, 27.1), ByteOrder.LITTLE_ENDIAN);
+        byte[] otherWkbPoint = WellKnownBinary.toWKB(new Point(30.0, 50.0), ByteOrder.LITTLE_ENDIAN);
 
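The new "wkb" fetch-format assertions in this file compare against bytes from WellKnownBinary.toWKB, the same call the patch uses to build the expected values. A standalone sketch of what that encoding yields (the 21-byte figure follows from the standard WKB layout for a 2D point):

```java
import org.elasticsearch.geometry.Point;
import org.elasticsearch.geometry.utils.WellKnownBinary;

import java.nio.ByteOrder;

class WkbSketch {
    public static void main(String[] args) {
        // Point(x, y) takes longitude first, matching the [lon, lat] source format below.
        byte[] wkb = WellKnownBinary.toWKB(new Point(42.0, 27.1), ByteOrder.LITTLE_ENDIAN);
        // A 2D WKB point is 21 bytes: 1 (byte order) + 4 (geometry type) + 2 * 8 (x, y).
        System.out.println(wkb.length); // 21
    }
}
```

        // Test a single point in [lon, lat] array format.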
        Object sourceValue = List.of(42.0, 27.1);
         assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
         assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+        List<?> wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+        assertThat(wkb.size(), equalTo(1));
+        assertThat(wkb.get(0), equalTo(wkbPoint));
 
         // Test a single point in "lat, lon" string format.
         sourceValue = "27.1,42.0";
         assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
         assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+        wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+        assertThat(wkb.size(), equalTo(1));
+        assertThat(wkb.get(0), equalTo(wkbPoint));
 
         // Test a list of points in [lon, lat] array format.
         sourceValue = List.of(List.of(42.0, 27.1), List.of(30.0, 50.0));
         assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null));
         assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+        wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+        assertThat(wkb.size(), equalTo(2));
+        assertThat(wkb.get(0), equalTo(wkbPoint));
+        assertThat(wkb.get(1), equalTo(otherWkbPoint));
 
         // Test a list of points in [lat,lon] array format with one malformed
         sourceValue = List.of(List.of(42.0, 27.1), List.of("a", "b"), List.of(30.0, 50.0));
         assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null));
         assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+        wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+        assertThat(wkb.size(), equalTo(2));
+        assertThat(wkb.get(0), equalTo(wkbPoint));
+        assertThat(wkb.get(1), equalTo(otherWkbPoint));
 
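Before the WKT case below, it may help to see the source shapes this test feeds through fetchSourceValue; all of them represent the same point (values taken from the assertions above):

```java
import java.util.List;
import java.util.Map;

class GeoPointSourceShapes {
    // All of these represent lon=42.0, lat=27.1 and fetch to the same output:
    static final Object ARRAY = List.of(42.0, 27.1);            // [lon, lat]
    static final Object STRING = "27.1,42.0";                   // "lat,lon"
    static final Object WKT = "POINT (42.0 27.1)";              // lon lat
    static final Object GEOJSON = Map.of("type", "Point", "coordinates", List.of(42.0, 27.1));
}
```

        // Test a single point in well-known text format.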
        sourceValue = "POINT (42.0 27.1)";
         assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
         assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+        wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+        assertThat(wkb.size(), equalTo(1));
+        assertThat(wkb.get(0), equalTo(wkbPoint));
 
         // Test a malformed value
         sourceValue = "malformed";
@@ -71,9 +94,13 @@ public void testFetchSourceValue() throws IOException {
         if (ignoreMalformed) {
             assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null));
             assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt"));
+            wkb = fetchSourceValue(mapper, sourceValue, "wkb");
+            assertThat(wkb.size(), equalTo(1));
+            assertThat(wkb.get(0), equalTo(wkbPoint));
         } else {
             assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null));
             assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt"));
+            assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb"));
         }
 
         // test single point in GeoJSON format
@@ -110,13 +137,13 @@ public void testFetchVectorTile() throws IOException {
             final double lat = GeoTestUtil.nextLatitude();
             final double lon = GeoTestUtil.nextLongitude();
             List<?> sourceValue = fetchSourceValue(mapper, List.of(lon, lat), mvtString);
-            assertThat(sourceValue.size(), Matchers.equalTo(1));
-            assertThat(sourceValue.get(0), Matchers.equalTo(featureFactory.point(lon, lat)));
+            assertThat(sourceValue.size(), equalTo(1));
+            assertThat(sourceValue.get(0), equalTo(featureFactory.point(lon, lat)));
             geoPoints.add(new GeoPoint(lat, lon));
             values.add(List.of(lon, lat));
         }
         List<?> sourceValue = fetchSourceValue(mapper, values, mvtString);
-        assertThat(sourceValue.size(), Matchers.equalTo(1));
-        assertThat(sourceValue.get(0), Matchers.equalTo(featureFactory.points(geoPoints)));
+        assertThat(sourceValue.size(), equalTo(1));
+        assertThat(sourceValue.get(0), equalTo(featureFactory.points(geoPoints)));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValuesTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValuesTests.java
index 60fe4c4a6d99f..b3684d420d728 100644
--- a/server/src/test/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValuesTests.java
+++ b/server/src/test/java/org/elasticsearch/index/mapper/IgnoreMalformedStoredValuesTests.java
@@ -148,13 +148,13 @@ private static XContentParser ignoreMalformed(XContentType type, Object value) t
     private static StoredField ignoreMalformedStoredField(XContentType type, Object value) throws IOException {
         XContentBuilder b = XContentBuilder.builder(type.xContent());
         b.startObject().field("name", value).endObject();
-        XContentParser p = type.xContent().createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(b).streamInput());
-        assertThat(p.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
-        assertThat(p.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
-        assertThat(p.currentName(), equalTo("name"));
-        p.nextToken();
-
-        return IgnoreMalformedStoredValues.storedField("foo.name", p);
+        try (XContentParser p = type.xContent().createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(b).streamInput())) {
+            assertThat(p.nextToken(), equalTo(XContentParser.Token.START_OBJECT));
+            assertThat(p.nextToken(), equalTo(XContentParser.Token.FIELD_NAME));
+            assertThat(p.currentName(), equalTo("name"));
+            p.nextToken();
+            return IgnoreMalformedStoredValues.storedField("foo.name", p);
+        }
     }
 
     private static XContentParser 
parserFrom(IgnoreMalformedStoredValues values, String fieldName) throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java index dfbfb737c9ab2..3eb4675d37e97 100644 --- a/server/src/test/java/org/elasticsearch/index/store/StoreTests.java +++ b/server/src/test/java/org/elasticsearch/index/store/StoreTests.java @@ -775,22 +775,22 @@ public void testStoreStats() throws IOException { final long localStoreSizeDelta = randomLongBetween(-initialStoreSize, initialStoreSize); final long reservedBytes = randomBoolean() ? StoreStats.UNKNOWN_RESERVED_BYTES : randomLongBetween(0L, Integer.MAX_VALUE); StoreStats stats = store.stats(reservedBytes, size -> size + localStoreSizeDelta); - assertEquals(initialStoreSize, stats.totalDataSetSize().getBytes()); - assertEquals(initialStoreSize + localStoreSizeDelta, stats.getSize().getBytes()); - assertEquals(reservedBytes, stats.getReservedSize().getBytes()); + assertEquals(initialStoreSize, stats.totalDataSetSizeInBytes()); + assertEquals(initialStoreSize + localStoreSizeDelta, stats.sizeInBytes()); + assertEquals(reservedBytes, stats.reservedSizeInBytes()); stats.add(null); - assertEquals(initialStoreSize, stats.totalDataSetSize().getBytes()); - assertEquals(initialStoreSize + localStoreSizeDelta, stats.getSize().getBytes()); - assertEquals(reservedBytes, stats.getReservedSize().getBytes()); + assertEquals(initialStoreSize, stats.totalDataSetSizeInBytes()); + assertEquals(initialStoreSize + localStoreSizeDelta, stats.sizeInBytes()); + assertEquals(reservedBytes, stats.reservedSizeInBytes()); final long otherStatsDataSetBytes = randomLongBetween(0L, Integer.MAX_VALUE); final long otherStatsLocalBytes = randomLongBetween(0L, Integer.MAX_VALUE); final long otherStatsReservedBytes = randomBoolean() ? 
StoreStats.UNKNOWN_RESERVED_BYTES : randomLongBetween(0L, Integer.MAX_VALUE); stats.add(new StoreStats(otherStatsLocalBytes, otherStatsDataSetBytes, otherStatsReservedBytes)); - assertEquals(initialStoreSize + otherStatsDataSetBytes, stats.totalDataSetSize().getBytes()); - assertEquals(initialStoreSize + otherStatsLocalBytes + localStoreSizeDelta, stats.getSize().getBytes()); - assertEquals(Math.max(reservedBytes, 0L) + Math.max(otherStatsReservedBytes, 0L), stats.getReservedSize().getBytes()); + assertEquals(initialStoreSize + otherStatsDataSetBytes, stats.totalDataSetSizeInBytes()); + assertEquals(initialStoreSize + otherStatsLocalBytes + localStoreSizeDelta, stats.sizeInBytes()); + assertEquals(Math.max(reservedBytes, 0L) + Math.max(otherStatsReservedBytes, 0L), stats.reservedSizeInBytes()); Directory dir = store.directory(); final long length; @@ -805,8 +805,8 @@ public void testStoreStats() throws IOException { assertTrue(numNonExtraFiles(store) > 0); stats = store.stats(0L, size -> size + localStoreSizeDelta); - assertEquals(initialStoreSize + length, stats.totalDataSetSize().getBytes()); - assertEquals(initialStoreSize + localStoreSizeDelta + length, stats.getSizeInBytes()); + assertEquals(initialStoreSize + length, stats.totalDataSetSizeInBytes()); + assertEquals(initialStoreSize + localStoreSizeDelta + length, stats.sizeInBytes()); deleteContent(store.directory()); IOUtils.close(store); diff --git a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java index dc2b7614fb52c..8f4bb9d9e3c5d 100644 --- a/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java +++ b/server/src/test/java/org/elasticsearch/indices/TermsLookupTests.java @@ -63,14 +63,15 @@ public void testSerialization() throws IOException { } public void testXContentParsing() throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, """ - { "index" : "index", "id" : "id", "path" : "path", "routing" : "routing" }"""); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, """ + { "index" : "index", "id" : "id", "path" : "path", "routing" : "routing" }""")) { - TermsLookup tl = TermsLookup.parseTermsLookup(parser); - assertEquals("index", tl.index()); - assertEquals("id", tl.id()); - assertEquals("path", tl.path()); - assertEquals("routing", tl.routing()); + TermsLookup tl = TermsLookup.parseTermsLookup(parser); + assertEquals("index", tl.index()); + assertEquals("id", tl.id()); + assertEquals("path", tl.path()); + assertEquals("routing", tl.routing()); + } } public static TermsLookup randomTermsLookup() { diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java index c20c9615573d6..a517d09b2aefe 100644 --- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java @@ -346,6 +346,7 @@ public void testParentTriggersG1GCBeforeBreaking() throws InterruptedException, AtomicReference> onOverLimit = new AtomicReference<>(leader -> {}); AtomicLong time = new AtomicLong(randomLongBetween(Long.MIN_VALUE / 2, Long.MAX_VALUE / 2)); long interval = randomLongBetween(1, 1000); + long fullGCInterval = randomLongBetween(500, 2000); final HierarchyCircuitBreakerService service = new 
HierarchyCircuitBreakerService( CircuitBreakerMetrics.NOOP, clusterSettings, @@ -357,6 +358,8 @@ public void testParentTriggersG1GCBeforeBreaking() throws InterruptedException, HierarchyCircuitBreakerService.createYoungGcCountSupplier(), time::get, interval, + fullGCInterval, + TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30) ) { @@ -481,6 +484,7 @@ public void testG1OverLimitStrategyBreakOnMemory() { AtomicInteger leaderTriggerCount = new AtomicInteger(); AtomicInteger nonLeaderTriggerCount = new AtomicInteger(); long interval = randomLongBetween(1, 1000); + long fullGCInterval = randomLongBetween(500, 2000); AtomicLong memoryUsage = new AtomicLong(); HierarchyCircuitBreakerService.G1OverLimitStrategy strategy = new HierarchyCircuitBreakerService.G1OverLimitStrategy( @@ -489,6 +493,8 @@ public void testG1OverLimitStrategyBreakOnMemory() { () -> 0, time::get, interval, + fullGCInterval, + TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30) ) { @Override @@ -535,6 +541,7 @@ public void testG1OverLimitStrategyBreakOnGcCount() { AtomicInteger leaderTriggerCount = new AtomicInteger(); AtomicInteger nonLeaderTriggerCount = new AtomicInteger(); long interval = randomLongBetween(1, 1000); + long fullGCInterval = randomLongBetween(500, 2000); AtomicLong memoryUsageCounter = new AtomicLong(); AtomicLong gcCounter = new AtomicLong(); LongSupplier memoryUsageSupplier = () -> { @@ -547,6 +554,8 @@ public void testG1OverLimitStrategyBreakOnGcCount() { gcCounter::incrementAndGet, time::get, interval, + fullGCInterval, + TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30) ) { @@ -569,13 +578,15 @@ void overLimitTriggered(boolean leader) { assertThat(strategy.overLimit(input), sameInstance(input)); assertThat(leaderTriggerCount.get(), equalTo(1)); assertThat(gcCounter.get(), equalTo(2L)); - assertThat(memoryUsageCounter.get(), equalTo(2L)); // 1 before gc count break and 1 to get resulting memory usage. + // 1 before gc count break, 1 for full GC check and 1 to get resulting memory usage. 
+ assertThat(memoryUsageCounter.get(), equalTo(3L)); } public void testG1OverLimitStrategyThrottling() throws InterruptedException, BrokenBarrierException, TimeoutException { AtomicLong time = new AtomicLong(randomLongBetween(Long.MIN_VALUE / 2, Long.MAX_VALUE / 2)); AtomicInteger leaderTriggerCount = new AtomicInteger(); long interval = randomLongBetween(1, 1000); + long fullGCInterval = randomLongBetween(500, 2000); AtomicLong memoryUsage = new AtomicLong(); HierarchyCircuitBreakerService.G1OverLimitStrategy strategy = new HierarchyCircuitBreakerService.G1OverLimitStrategy( JvmInfo.jvmInfo(), @@ -583,6 +594,8 @@ public void testG1OverLimitStrategyThrottling() throws InterruptedException, Bro () -> 0, time::get, interval, + fullGCInterval, + TimeValue.timeValueSeconds(30), TimeValue.timeValueSeconds(30) ) { @@ -661,6 +674,8 @@ public void testG1LockTimeout() throws Exception { gcCounter::incrementAndGet, () -> 0, 1, + 1, + TimeValue.timeValueMillis(randomFrom(0, 5, 10)), TimeValue.timeValueMillis(randomFrom(0, 5, 10)) ) { diff --git a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java index 166b830d14301..71e90e8f4cc06 100644 --- a/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/IngestDocumentTests.java @@ -339,7 +339,7 @@ public void testSimpleSetFieldValue() { } public void testSetFieldValueNullValue() { - ingestDocument.setFieldValue("new_field", null); + ingestDocument.setFieldValue("new_field", (Object) null); assertThat(ingestDocument.getSourceAndMetadata().containsKey("new_field"), equalTo(true)); assertThat(ingestDocument.getSourceAndMetadata().get("new_field"), nullValue()); } diff --git a/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java b/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java index 84b7db7301597..0f18d04d8ac9c 100644 --- a/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java +++ b/server/src/test/java/org/elasticsearch/ingest/ValueSourceTests.java @@ -45,7 +45,7 @@ public void testCopyDoesNotChangeProvidedMap() { IngestDocument ingestDocument = TestIngestDocument.emptyIngestDocument(); ingestDocument.setFieldValue( - new TestTemplateService.MockTemplateScript.Factory("field1"), + ingestDocument.renderTemplate(new TestTemplateService.MockTemplateScript.Factory("field1")), ValueSource.wrap(myPreciousMap, TestTemplateService.instance()) ); ingestDocument.removeField("field1.field2"); @@ -60,7 +60,7 @@ public void testCopyDoesNotChangeProvidedList() { IngestDocument ingestDocument = TestIngestDocument.emptyIngestDocument(); ingestDocument.setFieldValue( - new TestTemplateService.MockTemplateScript.Factory("field1"), + ingestDocument.renderTemplate(new TestTemplateService.MockTemplateScript.Factory("field1")), ValueSource.wrap(myPreciousList, TestTemplateService.instance()) ); ingestDocument.removeField("field1.0"); diff --git a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java index 35f65ebedf9b9..f9647c27e0acb 100644 --- a/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/ExtensionLoaderTests.java @@ -20,10 +20,12 @@ import java.util.HashMap; import java.util.Locale; import java.util.Map; +import java.util.Optional; import java.util.ServiceLoader; import static 
org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
 import static org.hamcrest.Matchers.not;
 import static org.hamcrest.Matchers.nullValue;
 
@@ -68,21 +70,16 @@ public int getValue() {
             """, name, value);
     }
 
-    public void testNoProviderNullFallback() {
-        TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class), () -> null);
-        assertThat(service, nullValue());
-    }
-
     public void testNoProvider() {
-        TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class), () -> () -> 2);
-        assertThat(service, not(nullValue()));
-        assertThat(service.getValue(), equalTo(2));
+        Optional<TestService> service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class));
+        assertThat(service.isEmpty(), is(true));
     }
 
     public void testOneProvider() throws Exception {
         Map<String, CharSequence> sources = Map.of("p.FooService", defineProvider("FooService", 1));
         try (var loader = buildProviderJar(sources)) {
-            TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()), () -> null);
+            TestService service = ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()))
+                .orElseThrow(AssertionError::new);
             assertThat(service, not(nullValue()));
             assertThat(service.getValue(), equalTo(1));
         }
@@ -98,7 +95,7 @@ public void testManyProviders() throws Exception {
         try (var loader = buildProviderJar(sources)) {
             var e = expectThrows(
                 IllegalStateException.class,
-                () -> ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()), () -> null)
+                () -> ExtensionLoader.loadSingleton(ServiceLoader.load(TestService.class, loader.classloader()))
             );
             assertThat(e.getMessage(), containsString("More than one extension found"));
             assertThat(e.getMessage(), containsString("TestService"));
diff --git a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
index 580fdaf6f7f7d..37ff5521c201f 100644
--- a/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/IndexIdTests.java
@@ -48,17 +48,20 @@ public void testXContent() throws IOException {
         IndexId indexId = new IndexId(randomAlphaOfLength(8), UUIDs.randomBase64UUID());
         XContentBuilder builder = JsonXContent.contentBuilder();
         indexId.toXContent(builder, ToXContent.EMPTY_PARAMS);
-        XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder));
-        assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
-        String name = null;
-        String id = null;
-        while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
-            final String currentFieldName = parser.currentName();
-            parser.nextToken();
-            if (currentFieldName.equals(IndexId.NAME)) {
-                name = parser.text();
-            } else if (currentFieldName.equals(IndexId.ID)) {
-                id = parser.text();
+        String name;
+        String id;
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, BytesReference.bytes(builder))) {
+            assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
+            name = null;
+            id = null;
+            while (parser.nextToken() != XContentParser.Token.END_OBJECT) {
+                final String currentFieldName = parser.currentName();
+                parser.nextToken();
+                if (currentFieldName.equals(IndexId.NAME)) {
+                    name = parser.text();
+                } else if (currentFieldName.equals(IndexId.ID)) {
+                    id = parser.text();
+                }
             }
         }
         assertNotNull(name);
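For context on the ExtensionLoader change above: loadSingleton no longer accepts a fallback supplier and instead returns an Optional, leaving the default to each call site. A sketch against a hypothetical ExampleExtension SPI (the interface name is invented; the ExtensionLoader calls match the tests above):

```java
import org.elasticsearch.plugins.ExtensionLoader;

import java.util.Optional;
import java.util.ServiceLoader;

class ExtensionLookupSketch {
    // Hypothetical extension point; any SPI interface works the same way.
    interface ExampleExtension {
        int getValue();
    }

    static int valueOrDefault() {
        Optional<ExampleExtension> loaded = ExtensionLoader.loadSingleton(ServiceLoader.load(ExampleExtension.class));
        // Zero providers: empty. One provider: that instance. Two or more: IllegalStateException.
        return loaded.map(ExampleExtension::getValue).orElse(0);
    }
}
```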
diff --git a/server/src/test/java/org/elasticsearch/search/TelemetryMetrics/SearchTransportTelemetryTests.java b/server/src/test/java/org/elasticsearch/search/TelemetryMetrics/SearchTransportTelemetryTests.java
new file mode 100644
index 0000000000000..c38cab8c8364b
--- /dev/null
+++ b/server/src/test/java/org/elasticsearch/search/TelemetryMetrics/SearchTransportTelemetryTests.java
@@ -0,0 +1,135 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0 and the Server Side Public License, v 1; you may not use this file except
+ * in compliance with, at your election, the Elastic License 2.0 or the Server
+ * Side Public License, v 1.
+ */
+
+package org.elasticsearch.search.TelemetryMetrics;
+
+import org.elasticsearch.action.search.SearchType;
+import org.elasticsearch.core.TimeValue;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.PluginsService;
+import org.elasticsearch.telemetry.Measurement;
+import org.elasticsearch.telemetry.TestTelemetryPlugin;
+import org.elasticsearch.test.ESIntegTestCase;
+
+import java.util.Collection;
+import java.util.List;
+
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.DFS_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FETCH_ID_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FETCH_ID_SCROLL_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.FREE_CONTEXT_SCROLL_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_ID_ACTION_METRIC;
+import static org.elasticsearch.action.search.SearchTransportAPMMetrics.QUERY_SCROLL_ACTION_METRIC;
+import static org.elasticsearch.index.query.QueryBuilders.simpleQueryStringQuery;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertScrollResponsesAndHitCount;
+import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHitsWithoutFailures;
+
+@ESIntegTestCase.ClusterScope(scope = ESIntegTestCase.Scope.TEST, numDataNodes = 1)
+public class SearchTransportTelemetryTests extends ESIntegTestCase {
+    @Override
+    protected Collection<Class<? extends Plugin>> nodePlugins() {
+        return List.of(TestTelemetryPlugin.class);
+    }
+
+    @Override
+    protected int minimumNumberOfShards() {
+        return 2;
+    }
+
+    @Override
+    protected int maximumNumberOfShards() {
+        return 7;
+    }
+
+    @Override
+    protected int maximumNumberOfReplicas() {
+        return 0;
+    }
+
+    public void testSearchTransportMetricsDfsQueryThenFetch() throws InterruptedException {
+        var indexName = "test1";
+        createIndex(indexName);
+        indexRandom(true, false, prepareIndex(indexName).setId("1").setSource("body", "foo"));
+
+        assertSearchHitsWithoutFailures(
+            prepareSearch(indexName).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setQuery(simpleQueryStringQuery("foo")),
+            "1"
+        );
+        assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(DFS_ACTION_METRIC));
+        assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(QUERY_ID_ACTION_METRIC));
+        assertNotEquals(0, getNumberOfMeasurements(FETCH_ID_ACTION_METRIC));
+        resetMeter();
+    }
+
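The shard-count assertions in this new test rely on one histogram measurement being recorded per shard-level transport action. An equivalent standalone counter, using equals rather than the identity comparison the helper at the end of this file gets away with (the attribute values are the shared metric-name constants), could look like:

```java
import org.elasticsearch.action.search.SearchTransportAPMMetrics;
import org.elasticsearch.telemetry.Measurement;
import org.elasticsearch.telemetry.TestTelemetryPlugin;

import java.util.List;

class MeasurementCountSketch {
    // Counts how many latency samples were tagged with the given action attribute.
    static long countForAction(TestTelemetryPlugin plugin, String actionAttribute) {
        List<Measurement> measurements = plugin.getLongHistogramMeasurement(
            SearchTransportAPMMetrics.SEARCH_ACTION_LATENCY_BASE_METRIC
        );
        return measurements.stream()
            .filter(m -> actionAttribute.equals(m.attributes().get(SearchTransportAPMMetrics.ACTION_ATTRIBUTE_NAME)))
            .count();
    }
}
```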
+    public void testSearchTransportMetricsQueryThenFetch() throws InterruptedException {
+        var indexName = "test2";
+        createIndex(indexName);
+        indexRandom(true, false, prepareIndex(indexName).setId("1").setSource("body", "foo"));
+
+        assertSearchHitsWithoutFailures(
+            prepareSearch(indexName).setSearchType(SearchType.QUERY_THEN_FETCH).setQuery(simpleQueryStringQuery("foo")),
+            "1"
+        );
+        assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(QUERY_ACTION_METRIC));
+        assertNotEquals(0, getNumberOfMeasurements(FETCH_ID_ACTION_METRIC));
+        resetMeter();
+    }
+
+    public void testSearchTransportMetricsScroll() throws InterruptedException {
+        var indexName = "test3";
+        createIndex(indexName);
+        indexRandom(
+            true,
+            false,
+            prepareIndex(indexName).setId("1").setSource("body", "foo"),
+            prepareIndex(indexName).setId("2").setSource("body", "foo")
+        ); // getNumShards(indexName).numPrimaries
+
+        assertScrollResponsesAndHitCount(
+            TimeValue.timeValueSeconds(60),
+            prepareSearch(indexName).setSearchType(SearchType.DFS_QUERY_THEN_FETCH).setSize(1).setQuery(simpleQueryStringQuery("foo")),
+            2,
+            (respNum, response) -> {
+                if (respNum == 1) {
+                    assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(DFS_ACTION_METRIC));
+                    assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(QUERY_ID_ACTION_METRIC));
+                    assertNotEquals(0, getNumberOfMeasurements(FETCH_ID_ACTION_METRIC));
+                    resetMeter();
+                } else if (respNum == 2) {
+                    assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(QUERY_SCROLL_ACTION_METRIC));
+                    assertNotEquals(0, getNumberOfMeasurements(FETCH_ID_SCROLL_ACTION_METRIC));
+                } else {
+                    resetMeter();
+                }
+            }
+        );
+
+        assertEquals(getNumShards(indexName).numPrimaries, getNumberOfMeasurements(FREE_CONTEXT_SCROLL_ACTION_METRIC));
+        resetMeter();
+    }
+
+    private void resetMeter() {
+        getTestTelemetryPlugin().resetMeter();
+    }
+
+    private TestTelemetryPlugin getTestTelemetryPlugin() {
+        return internalCluster().getDataNodeInstance(PluginsService.class).filterPlugins(TestTelemetryPlugin.class).toList().get(0);
+    }
+
+    private long getNumberOfMeasurements(String attributeValue) {
+        final List<Measurement> measurements = getTestTelemetryPlugin().getLongHistogramMeasurement(
+            org.elasticsearch.action.search.SearchTransportAPMMetrics.SEARCH_ACTION_LATENCY_BASE_METRIC
+        );
+        return measurements.stream()
+            .filter(
+                m -> m.attributes().get(org.elasticsearch.action.search.SearchTransportAPMMetrics.ACTION_ATTRIBUTE_NAME) == attributeValue
+            )
+            .count();
+    }
+}
diff --git a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java
index cd3d195030c55..f42ca49dc14b9 100644
--- a/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java
+++ b/server/src/test/java/org/elasticsearch/search/query/ThrowingQueryBuilder.java
@@ -67,7 +67,7 @@ public ThrowingQueryBuilder(StreamInput in) throws IOException {
         this.randomUID = in.readLong();
         this.failure = in.readException();
         this.shardId = in.readVInt();
-        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) {
+        if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) {
             this.index = in.readOptionalString();
         } else {
             this.index = null;
@@ -79,7 +79,7 @@ protected void doWriteTo(StreamOutput out) throws IOException {
         out.writeLong(randomUID);
         out.writeException(failure);
         out.writeVInt(shardId);
-        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) {
             out.writeOptionalString(index);
         }
     }
diff --git 
a/server/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java b/server/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java index 48711a665c39e..8395fcce918d9 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/completion/QueryContextTestCase.java @@ -34,11 +34,12 @@ public void testToXContext() throws IOException { QC toXContent = createTestModel(); XContentBuilder builder = XContentFactory.jsonBuilder(); toXContent.toXContent(builder, ToXContent.EMPTY_PARAMS); - XContentParser parser = createParser(builder); - parser.nextToken(); - QC fromXContext = fromXContent(parser); - assertEquals(toXContent, fromXContext); - assertEquals(toXContent.hashCode(), fromXContext.hashCode()); + try (XContentParser parser = createParser(builder)) { + parser.nextToken(); + QC fromXContext = fromXContent(parser); + assertEquals(toXContent, fromXContext); + assertEquals(toXContent.hashCode(), fromXContext.hashCode()); + } } } } diff --git a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java index ad09c58b65cba..c87154e686a01 100644 --- a/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/elasticsearch/snapshots/SnapshotResiliencyTests.java @@ -59,6 +59,7 @@ import org.elasticsearch.action.search.SearchPhaseController; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchTransportAPMMetrics; import org.elasticsearch.action.search.SearchTransportService; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.action.support.ActionFilters; @@ -175,6 +176,7 @@ import org.elasticsearch.search.SearchService; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.FetchPhase; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.telemetry.tracing.Tracer; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -2018,7 +2020,8 @@ protected void assertSnapshotOrGenericThread() { actionFilters, indexNameExpressionResolver, namedWriteableRegistry, - EmptySystemIndices.INSTANCE.getExecutorSelector() + EmptySystemIndices.INSTANCE.getExecutorSelector(), + new SearchTransportAPMMetrics(TelemetryProvider.NOOP.getMeterRegistry()) ) ); actions.put( diff --git a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java index ead43d0bac05e..b3c7c5adac95d 100644 --- a/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/ProxyConnectionStrategyTests.java @@ -130,7 +130,11 @@ public void testProxyStrategyWillOpenExpectedNumberOfConnectionsToAddress() { ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ 
-188,7 +192,11 @@ public void testProxyStrategyWillOpenNewConnectionsOnDisconnect() throws Excepti AtomicBoolean useAddress1 = new AtomicBoolean(true); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -263,7 +271,11 @@ public void testConnectFailsWithIncompatibleNodes() { ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -328,7 +340,11 @@ public void testConnectFailsWithNonRetryableException() { ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -388,7 +404,11 @@ public void testClusterNameValidationPreventConnectingToDifferentClusters() thro AtomicBoolean useAddress1 = new AtomicBoolean(true); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -459,7 +479,11 @@ public void testProxyStrategyWillResolveAddressesEachConnect() throws Exception ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -511,7 +535,11 @@ public void onNodeConnected(DiscoveryNode node, Transport.Connection connection) }); try ( - var remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + var remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); var strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -554,7 +582,11 @@ public void testProxyStrategyWillNeedToBeRebuiltIfNumOfSocketsOrAddressesOrServe ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, @@ -672,7 +704,11 
@@ public void testServerNameAttributes() { ); int numOfConnections = randomIntBetween(4, 8); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); ProxyConnectionStrategy strategy = new ProxyConnectionStrategy( clusterAlias, localService, diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java index d4f03f1027838..dee28d6dea630 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterConnectionTests.java @@ -45,7 +45,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.transport.MockTransportService; import org.elasticsearch.threadpool.TestThreadPool; @@ -62,6 +61,7 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; +import java.util.Objects; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.CountDownLatch; @@ -155,17 +155,14 @@ public static MockTransportService startTransport( } else { searchHits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN); } - InternalSearchResponse response = new InternalSearchResponse( + SearchResponse searchResponse = new SearchResponse( searchHits, InternalAggregations.EMPTY, null, - null, false, null, - 1 - ); - SearchResponse searchResponse = new SearchResponse( - response, + null, + 1, null, 1, 1, @@ -252,7 +249,14 @@ public void run() { AtomicReference exceptionReference = new AtomicReference<>(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); - try (RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, randomBoolean())) { + try ( + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + randomFrom(RemoteClusterCredentialsManager.EMPTY, buildCredentialsManager(clusterAlias)) + ) + ) { ActionListener listener = ActionListener.wrap(x -> { listenerCalled.countDown(); fail("expected exception"); @@ -322,7 +326,14 @@ public void testCloseWhileConcurrentlyConnecting() throws IOException, Interrupt service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, seedNodes); - try (RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, false)) { + try ( + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + RemoteClusterCredentialsManager.EMPTY + ) + ) { int numThreads = randomIntBetween(4, 10); Thread[] threads = new Thread[numThreads]; CyclicBarrier barrier = new CyclicBarrier(numThreads + 1); @@ -470,7 +481,12 @@ private void doTestGetConnectionInfo(boolean hasClusterCredentials) throws Excep settings = Settings.builder().put(settings).setSecureSettings(secureSettings).build(); } try ( - 
RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, hasClusterCredentials) + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + hasClusterCredentials ? buildCredentialsManager(clusterAlias) : RemoteClusterCredentialsManager.EMPTY + ) ) { // test no nodes connected RemoteConnectionInfo remoteConnectionInfo = assertSerialization(connection.getConnectionInfo()); @@ -662,7 +678,12 @@ private void doTestCollectNodes(boolean hasClusterCredentials) throws Exception } try ( - RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, hasClusterCredentials) + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + hasClusterCredentials ? buildCredentialsManager(clusterAlias) : RemoteClusterCredentialsManager.EMPTY + ) ) { CountDownLatch responseLatch = new CountDownLatch(1); AtomicReference> reference = new AtomicReference<>(); @@ -713,7 +734,14 @@ public void testNoChannelsExceptREG() throws Exception { String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); - try (RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, false)) { + try ( + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + RemoteClusterCredentialsManager.EMPTY + ) + ) { PlainActionFuture plainActionFuture = new PlainActionFuture<>(); connection.ensureConnected(plainActionFuture); plainActionFuture.get(10, TimeUnit.SECONDS); @@ -779,7 +807,14 @@ public void testConnectedNodesConcurrentAccess() throws IOException, Interrupted String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, seedNodes); - try (RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, randomBoolean())) { + try ( + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + randomFrom(RemoteClusterCredentialsManager.EMPTY, buildCredentialsManager(clusterAlias)) + ) + ) { final int numGetThreads = randomIntBetween(4, 10); final Thread[] getThreads = new Thread[numGetThreads]; final int numModifyingThreads = randomIntBetween(4, 10); @@ -873,7 +908,14 @@ public void testGetConnection() throws Exception { service.acceptIncomingRequests(); String clusterAlias = "test-cluster"; Settings settings = buildRandomSettings(clusterAlias, addresses(seedNode)); - try (RemoteClusterConnection connection = new RemoteClusterConnection(settings, clusterAlias, service, false)) { + try ( + RemoteClusterConnection connection = new RemoteClusterConnection( + settings, + clusterAlias, + service, + RemoteClusterCredentialsManager.EMPTY + ) + ) { PlainActionFuture.get(fut -> connection.ensureConnected(fut.map(x -> null))); for (int i = 0; i < 10; i++) { // always a direct connection as the remote node is already connected @@ -921,4 +963,13 @@ private static Settings buildSniffSettings(String clusterAlias, List see ); return builder.build(); } + + private static RemoteClusterCredentialsManager buildCredentialsManager(String clusterAlias) { + Objects.requireNonNull(clusterAlias); + final Settings.Builder builder = Settings.builder(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote." 
+ clusterAlias + ".credentials", randomAlphaOfLength(20)); + builder.setSecureSettings(secureSettings); + return new RemoteClusterCredentialsManager(builder.build()); + } } diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java new file mode 100644 index 0000000000000..f02148a40e47e --- /dev/null +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterCredentialsManagerTests.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.transport; + +import org.elasticsearch.common.settings.MockSecureSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; + +public class RemoteClusterCredentialsManagerTests extends ESTestCase { + public void testResolveRemoteClusterCredentials() { + final String clusterAlias = randomAlphaOfLength(9); + final String otherClusterAlias = randomAlphaOfLength(10); + + final String secret = randomAlphaOfLength(20); + final Settings settings = buildSettingsWithCredentials(clusterAlias, secret); + RemoteClusterCredentialsManager credentialsManager = new RemoteClusterCredentialsManager(settings); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(secret)); + assertThat(credentialsManager.hasCredentials(otherClusterAlias), is(false)); + + final String updatedSecret = randomAlphaOfLength(21); + credentialsManager.updateClusterCredentials(buildSettingsWithCredentials(clusterAlias, updatedSecret)); + assertThat(credentialsManager.resolveCredentials(clusterAlias).toString(), equalTo(updatedSecret)); + + credentialsManager.updateClusterCredentials(Settings.EMPTY); + assertThat(credentialsManager.hasCredentials(clusterAlias), is(false)); + } + + private Settings buildSettingsWithCredentials(String clusterAlias, String secret) { + final Settings.Builder builder = Settings.builder(); + final MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("cluster.remote." 
+ clusterAlias + ".credentials", secret); + return builder.setSecureSettings(secureSettings).build(); + } +} diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java index 839138d3c7c34..b1ffda669e6a1 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionManagerTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeUtils; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -23,17 +24,20 @@ import java.io.IOException; import java.net.InetAddress; import java.util.HashSet; +import java.util.Optional; import java.util.Set; import java.util.concurrent.ExecutionException; import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_HANDSHAKE_ACTION_NAME; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; import static org.hamcrest.core.IsInstanceOf.instanceOf; import static org.mockito.ArgumentMatchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.when; public class RemoteConnectionManagerTests extends ESTestCase { @@ -49,6 +53,7 @@ public void setUp() throws Exception { transport = mock(Transport.class); remoteConnectionManager = new RemoteConnectionManager( "remote-cluster", + RemoteClusterCredentialsManager.EMPTY, new ClusterConnectionManager(Settings.EMPTY, transport, new ThreadContext(Settings.EMPTY)) ); @@ -120,10 +125,13 @@ public void testResolveRemoteClusterAlias() throws ExecutionException, Interrupt public void testRewriteHandshakeAction() throws IOException { final Transport.Connection connection = mock(Transport.Connection.class); + final String clusterAlias = randomAlphaOfLengthBetween(3, 8); + final RemoteClusterCredentialsManager credentialsResolver = mock(RemoteClusterCredentialsManager.class); + when(credentialsResolver.resolveCredentials(clusterAlias)).thenReturn(new SecureString(randomAlphaOfLength(42))); final Transport.Connection wrappedConnection = RemoteConnectionManager.wrapConnectionWithRemoteClusterInfo( connection, - randomAlphaOfLengthBetween(3, 8), - RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE + clusterAlias, + credentialsResolver ); final long requestId = randomLong(); final TransportRequest request = mock(TransportRequest.class); @@ -142,6 +150,26 @@ public void testRewriteHandshakeAction() throws IOException { verify(connection).sendRequest(requestId, anotherAction, request, options); } + public void testWrapAndResolveConnectionRoundTrip() { + final Transport.Connection connection = mock(Transport.Connection.class); + final String clusterAlias = randomAlphaOfLengthBetween(3, 8); + final RemoteClusterCredentialsManager credentialsResolver = mock(RemoteClusterCredentialsManager.class); + final SecureString credentials = new SecureString(randomAlphaOfLength(42)); + // second credential will never be resolved + when(credentialsResolver.resolveCredentials(clusterAlias)).thenReturn(credentials, (SecureString) 
null); + final Transport.Connection wrappedConnection = RemoteConnectionManager.wrapConnectionWithRemoteClusterInfo( + connection, + clusterAlias, + credentialsResolver + ); + + final Optional actual = RemoteConnectionManager + .resolveRemoteClusterAliasWithCredentials(wrappedConnection); + + assertThat(actual.isPresent(), is(true)); + assertThat(actual.get(), equalTo(new RemoteConnectionManager.RemoteClusterAliasWithCredentials(clusterAlias, credentials))); + } + private static class TestRemoteConnection extends CloseableConnection { private final DiscoveryNode node; diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java index 5d461e906a266..ca9986ba5eb1f 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteConnectionStrategyTests.java @@ -26,7 +26,11 @@ public void testStrategyChangeMeansThatStrategyMustBeRebuilt() { mock(Transport.class), threadContext ); - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager("cluster-alias", connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + "cluster-alias", + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); FakeConnectionStrategy first = new FakeConnectionStrategy( "cluster-alias", mock(TransportService.class), @@ -46,7 +50,11 @@ public void testSameStrategyChangeMeansThatStrategyDoesNotNeedToBeRebuilt() { mock(Transport.class), threadContext ); - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager("cluster-alias", connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + "cluster-alias", + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); FakeConnectionStrategy first = new FakeConnectionStrategy( "cluster-alias", mock(TransportService.class), @@ -69,7 +77,11 @@ public void testChangeInConnectionProfileMeansTheStrategyMustBeRebuilt() { assertEquals(TimeValue.MINUS_ONE, connectionManager.getConnectionProfile().getPingInterval()); assertEquals(Compression.Enabled.INDEXING_DATA, connectionManager.getConnectionProfile().getCompressionEnabled()); assertEquals(Compression.Scheme.LZ4, connectionManager.getConnectionProfile().getCompressionScheme()); - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager("cluster-alias", connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + "cluster-alias", + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); FakeConnectionStrategy first = new FakeConnectionStrategy( "cluster-alias", mock(TransportService.class), diff --git a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java index 3c955258d45c8..ddee1ff4d690a 100644 --- a/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java +++ b/server/src/test/java/org/elasticsearch/transport/SniffConnectionStrategyTests.java @@ -192,7 +192,11 @@ public void testSniffStrategyWillConnectToAndDiscoverNodes() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + 
hasClusterCredentials ? new RemoteClusterCredentialsManager(clientSettings) : RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -262,7 +266,11 @@ public void testSniffStrategyWillResolveDiscoveryNodesEachConnect() throws Excep threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -336,7 +344,11 @@ public void testSniffStrategyWillConnectToMaxAllowedNodesAndOpenNewConnectionsOn threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -424,7 +436,11 @@ public void testDiscoverWithSingleIncompatibleSeedNode() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -486,7 +502,11 @@ public void testConnectFailsWithIncompatibleNodes() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -549,7 +569,11 @@ public void testFilterNodesWithNodePredicate() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -617,7 +641,11 @@ public void testConnectFailsIfNoConnectionsOpened() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -694,7 +722,11 @@ public void testClusterNameValidationPreventConnectingToDifferentClusters() thro threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + 
RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -783,7 +815,11 @@ public void testMultipleCallsToConnectEnsuresConnection() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -895,7 +931,11 @@ public void testConfiguredProxyAddressModeWillReplaceNodeAddress() { threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, @@ -964,7 +1004,11 @@ public void testSniffStrategyWillNeedToBeRebuiltIfNumOfConnectionsOrSeedsOrProxy threadPool.getThreadContext() ); try ( - RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager(clusterAlias, connectionManager); + RemoteConnectionManager remoteConnectionManager = new RemoteConnectionManager( + clusterAlias, + RemoteClusterCredentialsManager.EMPTY, + connectionManager + ); SniffConnectionStrategy strategy = new SniffConnectionStrategy( clusterAlias, localService, diff --git a/server/src/test/java/org/elasticsearch/transport/TransportInfoTests.java b/server/src/test/java/org/elasticsearch/transport/TransportInfoTests.java index 8e23f0e3984b9..261a4ba339c18 100644 --- a/server/src/test/java/org/elasticsearch/transport/TransportInfoTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TransportInfoTests.java @@ -70,7 +70,10 @@ private void assertPublishAddress(TransportInfo httpInfo, String expected) throw httpInfo.toXContent(builder, ToXContent.EMPTY_PARAMS); builder.endObject(); - Map transportMap = (Map) createParser(builder).map().get(TransportInfo.Fields.TRANSPORT); + Map transportMap; + try (var parser = createParser(builder)) { + transportMap = (Map) parser.map().get(TransportInfo.Fields.TRANSPORT); + } Map profilesMap = (Map) transportMap.get("profiles"); assertEquals(expected, transportMap.get(TransportInfo.Fields.PUBLISH_ADDRESS)); assertEquals(expected, ((Map) profilesMap.get("test_profile")).get(TransportInfo.Fields.PUBLISH_ADDRESS)); diff --git a/settings.gradle b/settings.gradle index b3a33e11c4ec4..ce35c873f176e 100644 --- a/settings.gradle +++ b/settings.gradle @@ -97,9 +97,9 @@ List projects = [ 'test:fixtures:minio-fixture', 'test:fixtures:old-elasticsearch', 'test:fixtures:s3-fixture', + 'test:fixtures:testcontainer-utils', 'test:fixtures:geoip-fixture', 'test:fixtures:url-fixture', - 'test:fixtures:nginx-fixture', 'test:logger-usage', 'test:test-clusters', 'test:x-content', diff --git a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java index f6c5fdfe4db5c..93d08fbccd376 100644 --- a/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java +++ 
b/test/external-modules/apm-integration/src/javaRestTest/java/org/elasticsearch/test/apmintegration/MetricsApmIT.java @@ -56,7 +56,6 @@ protected String getTestRestCluster() { return cluster.getHttpAddresses(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103286") @SuppressWarnings("unchecked") public void testApmIntegration() throws Exception { Map>> sampleAssertions = new HashMap<>( @@ -108,8 +107,8 @@ public void testApmIntegration() throws Exception { client().performRequest(new Request("GET", "/_use_apm_metrics")); - finished.await(30, TimeUnit.SECONDS); - assertThat(sampleAssertions, Matchers.anEmptyMap()); + assertTrue("Timeout when waiting for assertions to complete.", finished.await(30, TimeUnit.SECONDS)); + assertThat(sampleAssertions, Matchers.equalTo(Collections.emptyMap())); } private Map.Entry>> assertion( diff --git a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java index 19284152efab6..92b05ec9bf649 100644 --- a/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java +++ b/test/external-modules/error-query/src/main/java/org/elasticsearch/test/errorquery/IndexError.java @@ -53,7 +53,7 @@ public IndexError(StreamInput in) throws IOException { this.shardIds = in.readBoolean() ? in.readIntArray() : null; this.errorType = in.readEnum(ERROR_TYPE.class); this.message = in.readString(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_051)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { this.stallTimeSeconds = in.readVInt(); } else { this.stallTimeSeconds = 0; @@ -69,7 +69,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeEnum(errorType); out.writeString(message); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_051)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeVInt(stallTimeSeconds); } } diff --git a/test/fixtures/krb5kdc-fixture/build.gradle b/test/fixtures/krb5kdc-fixture/build.gradle index 2ad51718f272b..ab9b4a32ed322 100644 --- a/test/fixtures/krb5kdc-fixture/build.gradle +++ b/test/fixtures/krb5kdc-fixture/build.gradle @@ -1,3 +1,6 @@ +import org.gradle.api.services.internal.BuildServiceProvider +import org.gradle.api.services.internal.BuildServiceRegistryInternal + /* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. 
Licensed under the Elastic License @@ -13,15 +16,15 @@ tasks.named("preProcessFixture").configure { doLast { // We need to create these up-front because if docker creates them they will be owned by root and we won't be // able to clean them up - services.each { file("${testFixturesDir}/shared/${it}").mkdirs() } + services.each { fixturesDir.dir("shared/${it}").get().getAsFile().mkdirs() } } } -tasks.named("postProcessFixture").configure { - inputs.dir("${testFixturesDir}/shared") +tasks.named("postProcessFixture").configure { task -> + inputs.dir(fixturesDir.dir('shared').get().getAsFile()) services.each { service -> - File confTemplate = file("${testFixturesDir}/shared/${service}/krb5.conf.template") - File confFile = file("${testFixturesDir}/shared/${service}/krb5.conf") + File confTemplate = fixturesDir.file("shared/${service}/krb5.conf.template").get().asFile + File confFile = fixturesDir.file("shared/${service}/krb5.conf").get().asFile outputs.file(confFile) doLast { assert confTemplate.exists() @@ -32,9 +35,8 @@ tasks.named("postProcessFixture").configure { } } - -project.ext.krb5Conf = { service -> file("$testFixturesDir/shared/${service}/krb5.conf") } -project.ext.krb5Keytabs = { service, fileName -> file("$testFixturesDir/shared/${service}/keytabs/${fileName}") } +project.ext.krb5Conf = { s -> file("$testFixturesDir/shared/${s}/krb5.conf") } +project.ext.krb5Keytabs = { s, fileName -> file("$testFixturesDir/shared/${s}/keytabs/${fileName}") } configurations { krb5ConfHdfsFile { diff --git a/test/fixtures/minio-fixture/build.gradle b/test/fixtures/minio-fixture/build.gradle index 66613809068f7..3c97315dc07ce 100644 --- a/test/fixtures/minio-fixture/build.gradle +++ b/test/fixtures/minio-fixture/build.gradle @@ -5,32 +5,26 @@ * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. 
*/ -apply plugin: 'java' apply plugin: 'elasticsearch.java' apply plugin: 'elasticsearch.cache-test-fixtures' description = 'Fixture for MinIO Storage service' configurations.all { - transitive = false + exclude group: 'org.hamcrest', module: 'hamcrest-core' } - dependencies { - testImplementation project(':test:framework') - + testImplementation (project(':test:framework')) api "junit:junit:${versions.junit}" - api "org.testcontainers:testcontainers:${versions.testcontainer}" - implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" - implementation "org.slf4j:slf4j-api:${versions.slf4j}" - implementation "com.github.docker-java:docker-java-api:${versions.dockerJava}" - - runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" - runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" - runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" - runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" - runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + api project(':test:fixtures:testcontainer-utils') + api("org.testcontainers:testcontainers:${versions.testcontainer}") { + transitive = false + } + api("com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}"){ + transitive = false + } // ensure we have proper logging when used in tests runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" - runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" + runtimeOnly("org.hamcrest:hamcrest:${versions.hamcrest}") } diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java index a7e6ba8d785a1..671632f2e0125 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java +++ b/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/minio/MinioTestContainer.java @@ -8,12 +8,10 @@ package org.elasticsearch.test.fixtures.minio; -import org.elasticsearch.test.fixtures.CacheableTestFixture; import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; -import org.junit.rules.TestRule; import org.testcontainers.images.builder.ImageFromDockerfile; -public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer implements TestRule, CacheableTestFixture { +public final class MinioTestContainer extends DockerEnvironmentAwareTestContainer { private static final int servicePort = 9000; public static final String DOCKER_BASE_IMAGE = "minio/minio:RELEASE.2021-03-01T04-20-55Z"; @@ -25,7 +23,7 @@ public MinioTestContainer() { public MinioTestContainer(boolean enabled) { super( - new ImageFromDockerfile().withDockerfileFromBuilder( + new ImageFromDockerfile("es-minio-testfixture").withDockerfileFromBuilder( builder -> builder.from(DOCKER_BASE_IMAGE) .env("MINIO_ACCESS_KEY", "s3_test_access_key") .env("MINIO_SECRET_KEY", "s3_test_secret_key") @@ -50,13 +48,4 @@ public void start() { public String getAddress() { return "http://127.0.0.1:" + getMappedPort(servicePort); } - - public void cache() { - try { - start(); - stop(); - } catch (RuntimeException e) { - logger().warn("Error while caching container images.", e); - } - } } diff --git a/test/fixtures/nginx-fixture/Dockerfile b/test/fixtures/nginx-fixture/Dockerfile deleted file mode
100644 index 01bad77c488c8..0000000000000 --- a/test/fixtures/nginx-fixture/Dockerfile +++ /dev/null @@ -1,2 +0,0 @@ -FROM nginx -COPY nginx.conf /etc/nginx/nginx.conf diff --git a/test/fixtures/nginx-fixture/build.gradle b/test/fixtures/nginx-fixture/build.gradle deleted file mode 100644 index 438473f70a6f2..0000000000000 --- a/test/fixtures/nginx-fixture/build.gradle +++ /dev/null @@ -1,22 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0 and the Server Side Public License, v 1; you may not use this file except - * in compliance with, at your election, the Elastic License 2.0 or the Server - * Side Public License, v 1. - */ -apply plugin: 'elasticsearch.test.fixtures' - -description = 'Fixture for an external http service' - -// These directories are shared between the URL repository and the FS repository in integration tests -project.ext { - fsRepositoryDir = file("${testFixturesDir}/fs-repository") -} - -tasks.named("preProcessFixture").configure { - doLast { - // tests expect to have an empty repo - project.ext.fsRepositoryDir.mkdirs() - } -} diff --git a/test/fixtures/nginx-fixture/docker-compose.yml b/test/fixtures/nginx-fixture/docker-compose.yml deleted file mode 100644 index bf6ab56bb0c9a..0000000000000 --- a/test/fixtures/nginx-fixture/docker-compose.yml +++ /dev/null @@ -1,9 +0,0 @@ -version: '3' -services: - nginx-fixture: - build: - context: . - volumes: - - ./testfixtures_shared/fs-repository:/data - ports: - - "80" diff --git a/test/fixtures/nginx-fixture/nginx.conf b/test/fixtures/nginx-fixture/nginx.conf deleted file mode 100644 index 9b199b2dc48b7..0000000000000 --- a/test/fixtures/nginx-fixture/nginx.conf +++ /dev/null @@ -1,10 +0,0 @@ -events {} - -http { - server { - listen 80 default_server; - listen [::]:80 default_server; - - root /data; - } -} diff --git a/test/fixtures/testcontainer-utils/build.gradle b/test/fixtures/testcontainer-utils/build.gradle new file mode 100644 index 0000000000000..3766722abcd65 --- /dev/null +++ b/test/fixtures/testcontainer-utils/build.gradle @@ -0,0 +1,22 @@ +apply plugin: 'elasticsearch.java' + +configurations.all { + transitive = false +} + +dependencies { + testImplementation project(':test:framework') + api "junit:junit:${versions.junit}" + api "org.testcontainers:testcontainers:${versions.testcontainer}" + implementation "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + api "com.github.docker-java:docker-java-api:${versions.dockerJava}" + implementation "org.slf4j:slf4j-api:${versions.slf4j}" + runtimeOnly "com.github.docker-java:docker-java-transport-zerodep:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-transport:${versions.dockerJava}" + runtimeOnly "com.github.docker-java:docker-java-core:${versions.dockerJava}" + runtimeOnly "org.apache.commons:commons-compress:${versions.commonsCompress}" + runtimeOnly "org.rnorth.duct-tape:duct-tape:${versions.ductTape}" + runtimeOnly "com.fasterxml.jackson.core:jackson-core:${versions.jackson}" + runtimeOnly "com.fasterxml.jackson.core:jackson-annotations:${versions.jackson}" + +} diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java similarity index 100% rename from 
test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java rename to test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/CacheableTestFixture.java diff --git a/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/ResourceUtils.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/ResourceUtils.java new file mode 100644 index 0000000000000..8fe64ea34d8d4 --- /dev/null +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/ResourceUtils.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0 and the Server Side Public License, v 1; you may not use this file except + * in compliance with, at your election, the Elastic License 2.0 or the Server + * Side Public License, v 1. + */ + +package org.elasticsearch.test.fixtures; + +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.net.URL; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.StandardCopyOption; + +public final class ResourceUtils { + + /** Copies a classpath resource into the given target folder and returns the path of the copied file. */ + public static Path copyResourceToFile(Class<?> clazz, Path targetFolder, String resourcePath) { + try { + ClassLoader classLoader = clazz.getClassLoader(); + URL resourceUrl = classLoader.getResource(resourcePath); + if (resourceUrl == null) { + throw new RuntimeException("Failed to load " + resourcePath + " from classpath"); + } + File outputFile = new File(targetFolder.toFile(), resourcePath.substring(resourcePath.lastIndexOf("/"))); + // close the stream after copying so the file handle is not leaked + try (InputStream inputStream = resourceUrl.openStream()) { + Files.copy(inputStream, outputFile.toPath(), StandardCopyOption.REPLACE_EXISTING); + } + return outputFile.toPath(); + } catch (IOException e) { + throw new RuntimeException("Failed to copy " + resourcePath + " from classpath", e); + } + } +} diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java similarity index 88% rename from test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java rename to test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java index c0fb83e5206f4..ce4d6fda861cd 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/DockerEnvironmentAwareTestContainer.java @@ -8,12 +8,14 @@ package org.elasticsearch.test.fixtures.testcontainers; -import org.elasticsearch.test.fixtures.minio.MinioTestContainer; +import org.elasticsearch.test.fixtures.CacheableTestFixture; import org.junit.Assume; +import org.junit.rules.TestRule; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.testcontainers.DockerClientFactory; import org.testcontainers.containers.GenericContainer; +import org.testcontainers.containers.output.Slf4jLogConsumer; import org.testcontainers.images.builder.ImageFromDockerfile; import java.io.File; @@ -27,7 +29,10 @@ import java.util.Map; import java.util.stream.Collectors; -public
class DockerEnvironmentAwareTestContainer extends GenericContainer { +public abstract class DockerEnvironmentAwareTestContainer extends GenericContainer + implements + TestRule, + CacheableTestFixture { protected static final Logger LOGGER = LoggerFactory.getLogger(DockerEnvironmentAwareTestContainer.class); private static final String DOCKER_ON_LINUX_EXCLUSIONS_FILE = ".ci/dockerOnLinuxExclusions"; @@ -59,9 +64,20 @@ public DockerEnvironmentAwareTestContainer(ImageFromDockerfile imageFromDockerfi public void start() { Assume.assumeFalse("Docker support excluded on OS", EXCLUDED_OS); Assume.assumeTrue("Docker probing successful", DOCKER_PROBING_SUCCESSFUL); + withLogConsumer(new Slf4jLogConsumer(logger())); super.start(); } + @Override + public void cache() { + try { + start(); + stop(); + } catch (RuntimeException e) { + logger().warn("Error while caching container images.", e); + } + } + static String deriveId(Map<String, String> values) { return values.get("ID") + "-" + values.get("VERSION_ID"); } diff --git a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java similarity index 80% rename from test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java rename to test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java index 1b0dacbacfd1a..d825330120eec 100644 --- a/test/fixtures/minio-fixture/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java +++ b/test/fixtures/testcontainer-utils/src/main/java/org/elasticsearch/test/fixtures/testcontainers/TestContainersThreadFilter.java @@ -17,6 +17,8 @@ public class TestContainersThreadFilter implements ThreadFilter { @Override public boolean reject(Thread t) { - return t.getName().startsWith("testcontainers-") || t.getName().startsWith("ducttape"); + return t.getName().startsWith("testcontainers-") + || t.getName().startsWith("ducttape") + || t.getName().startsWith("ForkJoinPool.commonPool-worker-"); } } diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java index bb86dab60b0eb..1004ea5b50119 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/MockInternalClusterInfoService.java @@ -107,7 +107,7 @@ ShardStats[] adjustShardStats(ShardStats[] shardsStats) { var storeStats = new StoreStats( shardSizeFunctionCopy.apply(shardRouting), shardSizeFunctionCopy.apply(shardRouting), - shardStats.getStats().store.getReservedSize().getBytes() + shardStats.getStats().store.reservedSizeInBytes() ); var commonStats = new CommonStats(new CommonStatsFlags(CommonStatsFlags.Flag.Store)); commonStats.store = storeStats; diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java index 89c8546d6b7d2..1158e805ba3c1 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/routing/TestShardRouting.java @@ -36,6 +36,16 @@ public static ShardRouting
newShardRouting(String index, int shardId, String cur } public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId, boolean primary, ShardRoutingState state) { + return newShardRouting(shardId, currentNodeId, primary, state, -1); + } + + public static ShardRouting newShardRouting( + ShardId shardId, + String currentNodeId, + boolean primary, + ShardRoutingState state, + long expectedShardSize + ) { assertNotEquals(ShardRoutingState.RELOCATING, state); return new ShardRouting( shardId, @@ -47,7 +57,7 @@ public static ShardRouting newShardRouting(ShardId shardId, String currentNodeId buildUnassignedInfo(state), buildRelocationFailureInfo(state), buildAllocationId(state), - -1, + expectedShardSize, ShardRouting.Role.DEFAULT ); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java index d1835459ab932..66f536fd378cb 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java +++ b/test/framework/src/main/java/org/elasticsearch/search/RandomSearchRequestGenerator.java @@ -327,12 +327,15 @@ public static SearchSourceBuilder randomSearchSourceBuilder( } jsonBuilder.endArray(); jsonBuilder.endObject(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(jsonBuilder).streamInput()); - parser.nextToken(); - parser.nextToken(); - parser.nextToken(); - builder.searchAfter(SearchAfterBuilder.fromXContent(parser).getSortValues()); + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY, BytesReference.bytes(jsonBuilder).streamInput()) + ) { + parser.nextToken(); + parser.nextToken(); + parser.nextToken(); + builder.searchAfter(SearchAfterBuilder.fromXContent(parser).getSortValues()); + } } catch (IOException e) { throw new RuntimeException("Error building search_from", e); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java index 589bc76c55a3d..3950683ca0c9d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/search/SearchResponseUtils.java @@ -9,6 +9,12 @@ import org.apache.lucene.search.TotalHits; import org.elasticsearch.action.search.SearchRequestBuilder; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.client.Response; +import org.elasticsearch.test.rest.ESRestTestCase; + +import java.io.IOException; public enum SearchResponseUtils { ; @@ -25,4 +31,37 @@ public static TotalHits getTotalHits(SearchRequestBuilder request) { public static long getTotalHitsValue(SearchRequestBuilder request) { return getTotalHits(request).value; } + + public static SearchResponse responseAsSearchResponse(Response searchResponse) throws IOException { + try (var parser = ESRestTestCase.responseAsParser(searchResponse)) { + return SearchResponse.fromXContent(parser); + } + } + + public static SearchResponse emptyWithTotalHits( + String scrollId, + int totalShards, + int successfulShards, + int skippedShards, + long tookInMillis, + ShardSearchFailure[] shardFailures, + SearchResponse.Clusters clusters + ) { + return new SearchResponse( + 
SearchHits.EMPTY_WITH_TOTAL_HITS, + null, + null, + false, + null, + null, + 1, + scrollId, + totalShards, + successfulShards, + skippedShards, + tookInMillis, + shardFailures, + clusters + ); + } } diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java index c03058f22da5d..3b347c50671cc 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/BaseAggregationTestCase.java @@ -51,11 +51,12 @@ public void testFromXContent() throws IOException { } factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - AggregationBuilder newAgg = parse(parser); - assertNotSame(newAgg, testAgg); - assertEquals(testAgg, newAgg); - assertEquals(testAgg.hashCode(), newAgg.hashCode()); + try (XContentParser parser = createParser(shuffled)) { + AggregationBuilder newAgg = parse(parser); + assertNotSame(newAgg, testAgg); + assertEquals(testAgg, newAgg); + assertEquals(testAgg.hashCode(), newAgg.hashCode()); + } } public void testSupportsConcurrentExecution() { @@ -85,10 +86,12 @@ public void testFromXContentMulti() throws IOException { } factoriesBuilder.toXContent(builder, ToXContent.EMPTY_PARAMS); XContentBuilder shuffled = shuffleXContent(builder); - XContentParser parser = createParser(shuffled); - assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); - AggregatorFactories.Builder parsed = AggregatorFactories.parseAggregators(parser); + AggregatorFactories.Builder parsed; + try (XContentParser parser = createParser(shuffled)) { + assertSame(XContentParser.Token.START_OBJECT, parser.nextToken()); + parsed = AggregatorFactories.parseAggregators(parser); + } assertThat(parsed.getAggregatorFactories(), hasSize(testAggs.size())); assertThat(parsed.getPipelineAggregatorFactories(), hasSize(0)); @@ -127,8 +130,10 @@ public void testSerializationMulti() throws IOException { public void testToString() throws IOException { AB testAgg = createTestAggregatorBuilder(); String toString = randomBoolean() ? 
Strings.toString(testAgg) : testAgg.toString(); - XContentParser parser = createParser(XContentType.JSON.xContent(), toString); - AggregationBuilder newAgg = parse(parser); + AggregationBuilder newAgg; + try (XContentParser parser = createParser(XContentType.JSON.xContent(), toString)) { + newAgg = parse(parser); + } assertNotSame(newAgg, testAgg); assertEquals(testAgg, newAgg); assertEquals(testAgg.hashCode(), newAgg.hashCode()); diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java index 3967a86ea7065..519b67aae556e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/bucket/AbstractSignificanceHeuristicTestCase.java @@ -197,8 +197,10 @@ public void testParseFromAggBuilder() throws IOException { stBuilder.significanceHeuristic(significanceHeuristic).field("text").minDocCount(200); XContentBuilder stXContentBuilder = XContentFactory.jsonBuilder(); stBuilder.internalXContent(stXContentBuilder, null); - XContentParser stParser = createParser(JsonXContent.jsonXContent, Strings.toString(stXContentBuilder)); - SignificanceHeuristic parsedHeuristic = parseSignificanceHeuristic(stParser); + SignificanceHeuristic parsedHeuristic; + try (XContentParser stParser = createParser(JsonXContent.jsonXContent, Strings.toString(stXContentBuilder))) { + parsedHeuristic = parseSignificanceHeuristic(stParser); + } assertThat(significanceHeuristic, equalTo(parsedHeuristic)); } diff --git a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java index 13131a5e3eef7..52d2f3f53a43e 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/geo/BasePointShapeQueryTestCase.java @@ -373,7 +373,8 @@ public void testQueryWithinMultiLine() throws Exception { try { client().prepareSearch(defaultIndexName) .setQuery(queryBuilder().shapeQuery(defaultFieldName, multiline).relation(ShapeRelation.WITHIN)) - .get(); + .get() + .decRef(); } catch (SearchPhaseExecutionException e) { assertThat(e.getCause().getMessage(), containsString("Field [" + defaultFieldName + "] found an unsupported shape Line")); } diff --git a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java index 5ab1641307fc5..e1641804d20b7 100644 --- a/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/snapshots/AbstractSnapshotIntegTestCase.java @@ -405,24 +405,25 @@ protected String initWithSnapshotVersion(String repoName, Path repoPath, IndexVe oldVersionString = currentVersionString.replace(",\"index_version\":" + IndexVersion.current(), "") .replace(",\"version\":\"8.11.0\"", ",\"version\":\"" + Version.fromId(version.id()) + "\""); } - final RepositoryData downgradedRepoData = RepositoryData.snapshotsFromXContent( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, oldVersionString), - repositoryData.getGenId(), - randomBoolean() - ); + final 
RepositoryData downgradedRepoData; + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, oldVersionString)) { + downgradedRepoData = RepositoryData.snapshotsFromXContent(parser, repositoryData.getGenId(), randomBoolean()); + } Files.write( repoPath.resolve(BlobStoreRepository.INDEX_FILE_PREFIX + repositoryData.getGenId()), BytesReference.toBytes(BytesReference.bytes(downgradedRepoData.snapshotsToXContent(XContentFactory.jsonBuilder(), version))), StandardOpenOption.TRUNCATE_EXISTING ); - final SnapshotInfo downgradedSnapshotInfo = SnapshotInfo.fromXContentInternal( - repoName, - JsonXContent.jsonXContent.createParser( + final SnapshotInfo downgradedSnapshotInfo; + try ( + var parser = JsonXContent.jsonXContent.createParser( XContentParserConfiguration.EMPTY, Strings.toString(snapshotInfo, ChecksumBlobStoreFormat.SNAPSHOT_ONLY_FORMAT_PARAMS) .replace(IndexVersion.current().toString(), version.toString()) ) - ); + ) { + downgradedSnapshotInfo = SnapshotInfo.fromXContentInternal(repoName, parser); + } final BlobStoreRepository blobStoreRepository = getRepositoryOnMaster(repoName); PlainActionFuture.get( f -> blobStoreRepository.threadPool() @@ -503,7 +504,7 @@ protected void indexRandomDocs(String index, int numdocs) throws InterruptedExce protected long getCountForIndex(String indexName) { return SearchResponseUtils.getTotalHitsValue( - client().prepareSearch(indexName).setSource(new SearchSourceBuilder().size(0).trackTotalHits(true)) + prepareSearch(indexName).setSource(new SearchSourceBuilder().size(0).trackTotalHits(true)) ); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java index 6137b2b0aad18..d3833fdb3a778 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractQueryTestCase.java @@ -152,10 +152,14 @@ public void testFromXContent() throws IOException { randomBoolean(), shuffleProtectedFields() ); - assertParsedQuery(createParser(xContentType.xContent(), shuffledXContent), testQuery); + try (var parser = createParser(xContentType.xContent(), shuffledXContent)) { + assertParsedQuery(parser, testQuery); + } for (Map.Entry alternateVersion : getAlternateVersions().entrySet()) { String queryAsString = alternateVersion.getKey(); - assertParsedQuery(createParser(JsonXContent.jsonXContent, queryAsString), alternateVersion.getValue()); + try (var parser = createParser(JsonXContent.jsonXContent, queryAsString)) { + assertParsedQuery(parser, alternateVersion.getValue()); + } } } } @@ -281,7 +285,7 @@ static List> alterateQueries(Set queries, Map levels = new LinkedList<>(); @@ -388,7 +392,7 @@ private void queryWrappedInArrayTest(String queryName, String validQuery) { + "[" + validQuery.substring(insertionPosition, endArrayPosition) + "]" - + validQuery.substring(endArrayPosition, validQuery.length()); + + validQuery.substring(endArrayPosition); ParsingException e = expectThrows(ParsingException.class, () -> parseQuery(testQuery)); assertEquals("[" + queryName + "] query malformed, no start_object after query name", e.getMessage()); @@ -424,12 +428,15 @@ private void assertParsedQuery(XContentParser parser, QueryBuilder expectedQuery protected QueryBuilder parseQuery(AbstractQueryBuilder builder) throws IOException { BytesReference bytes = XContentHelper.toXContent(builder, XContentType.JSON, false); - return 
parseQuery(createParser(JsonXContent.jsonXContent, bytes)); + try (var parser = createParser(JsonXContent.jsonXContent, bytes)) { + return parseQuery(parser); + } } protected QueryBuilder parseQuery(String queryAsString) throws IOException { - XContentParser parser = createParser(JsonXContent.jsonXContent, queryAsString); - return parseQuery(parser); + try (XContentParser parser = createParser(JsonXContent.jsonXContent, queryAsString)) { + return parseQuery(parser); + } } protected QueryBuilder parseQuery(XContentParser parser) throws IOException { @@ -464,13 +471,15 @@ public void testToQuery() throws IOException { assertNotNull("toQuery should not return null", firstLuceneQuery); assertLuceneQuery(firstQuery, firstLuceneQuery, context); // remove after assertLuceneQuery since the assertLuceneQuery impl might access the context as well - assertTrue( + assertEquals( "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, - firstQuery.equals(controlQuery) + firstQuery, + controlQuery ); - assertTrue( + assertEquals( "equals is not symmetric after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, - controlQuery.equals(firstQuery) + controlQuery, + firstQuery ); assertThat( "query copy's hashcode is different from original hashcode after calling toQuery, firstQuery: " @@ -552,7 +561,7 @@ protected boolean supportsQueryName() { * and {@link SearchExecutionContext}. Verifies that named queries and boost are properly handled and delegates to * {@link #doAssertLuceneQuery(AbstractQueryBuilder, Query, SearchExecutionContext)} for query specific checks. */ - private void assertLuceneQuery(QB queryBuilder, Query query, SearchExecutionContext context) throws IOException { + protected void assertLuceneQuery(QB queryBuilder, Query query, SearchExecutionContext context) throws IOException { if (queryBuilder.queryName() != null && query instanceof MatchNoDocsQuery == false) { Query namedQuery = context.copyNamedQueries().get(queryBuilder.queryName()); assertThat(namedQuery, equalTo(query)); @@ -649,9 +658,13 @@ public void testValidOutput() throws IOException { QB testQuery = createTestQueryBuilder(); XContentType xContentType = XContentType.JSON; String toString = Strings.toString(testQuery); - assertParsedQuery(createParser(xContentType.xContent(), toString), testQuery); + try (var parser = createParser(xContentType.xContent(), toString)) { + assertParsedQuery(parser, testQuery); + } BytesReference bytes = XContentHelper.toXContent(testQuery, xContentType, false); - assertParsedQuery(createParser(xContentType.xContent(), bytes), testQuery); + try (var parser = createParser(xContentType.xContent(), bytes)) { + assertParsedQuery(parser, testQuery); + } } } @@ -671,7 +684,7 @@ protected QB changeNameOrBoost(QB original) throws IOException { // we use the streaming infra to create a copy of the query provided as argument @SuppressWarnings("unchecked") - private QB copyQuery(QB query) throws IOException { + protected QB copyQuery(QB query) throws IOException { Reader reader = (Reader) namedWriteableRegistry().getReader(QueryBuilder.class, query.getWriteableName()); return copyWriteable(query, namedWriteableRegistry(), reader); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index fa4d196ceaeda..770c56f9c5952 100644 --- 
a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -154,8 +154,10 @@ public void test() throws IOException { randomFieldsExcludeFilter, createParser ); - XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent); - T parsed = fromXContent.apply(parser); + final T parsed; + try (XContentParser parser = createParser.apply(XContentFactory.xContent(xContentType), shuffledContent)) { + parsed = fromXContent.apply(parser); + } try { assertEqualsConsumer.accept(testInstance, parsed); if (assertToXContentEquivalence) { @@ -221,6 +223,34 @@ public static void testFromXContent( BiConsumer assertEqualsConsumer, boolean assertToXContentEquivalence, ToXContent.Params toXContentParams + ) throws IOException { + testFromXContent( + numberOfTestRuns, + instanceSupplier, + supportsUnknownFields, + shuffleFieldsExceptions, + randomFieldsExcludeFilter, + createParserFunction, + fromXContent, + assertEqualsConsumer, + assertToXContentEquivalence, + toXContentParams, + t -> {} + ); + } + + public static void testFromXContent( + int numberOfTestRuns, + Supplier instanceSupplier, + boolean supportsUnknownFields, + String[] shuffleFieldsExceptions, + Predicate randomFieldsExcludeFilter, + CheckedBiFunction createParserFunction, + CheckedFunction fromXContent, + BiConsumer assertEqualsConsumer, + boolean assertToXContentEquivalence, + ToXContent.Params toXContentParams, + Consumer dispose ) throws IOException { xContentTester(createParserFunction, instanceSupplier, toXContentParams, fromXContent).numberOfTestRuns(numberOfTestRuns) .supportsUnknownFields(supportsUnknownFields) @@ -228,6 +258,7 @@ public static void testFromXContent( .randomFieldsExcludeFilter(randomFieldsExcludeFilter) .assertEqualsConsumer(assertEqualsConsumer) .assertToXContentEquivalence(assertToXContentEquivalence) + .dispose(dispose) .test(); } @@ -246,10 +277,17 @@ public final void testFromXContent() throws IOException { this::parseInstance, this::assertEqualInstances, assertToXContentEquivalence(), - getToXContentParams() + getToXContentParams(), + this::dispose ); } + /** + * Callback invoked after a test instance is no longer needed that can be overridden to release resources associated with the instance. + * @param instance test instance that is no longer used + */ + protected void dispose(T instance) {} + /** * Creates a random test instance to use in the tests. 
This method will be * called multiple times during test execution and should return a different @@ -320,8 +358,9 @@ static BytesReference insertRandomFieldsAndShuffle( } else { withRandomFields = xContent; } - XContentParser parserWithRandomFields = createParserFunction.apply(XContentFactory.xContent(xContentType), withRandomFields); - return BytesReference.bytes(ESTestCase.shuffleXContent(parserWithRandomFields, false, shuffleFieldsExceptions)); + try (XContentParser parserWithRandomFields = createParserFunction.apply(XContentFactory.xContent(xContentType), withRandomFields)) { + return BytesReference.bytes(ESTestCase.shuffleXContent(parserWithRandomFields, false, shuffleFieldsExceptions)); + } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java index 23721de4aad9c..72fa522686632 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESIntegTestCase.java @@ -26,12 +26,12 @@ import org.elasticsearch.action.admin.cluster.allocation.ClusterAllocationExplainResponse; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.action.admin.cluster.node.info.NodeInfo; import org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest; import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse; +import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction; import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder; import org.elasticsearch.action.admin.indices.flush.FlushResponse; import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeResponse; @@ -119,6 +119,7 @@ import org.elasticsearch.indices.IndicesQueryCache; import org.elasticsearch.indices.IndicesRequestCache; import org.elasticsearch.indices.store.IndicesStore; +import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.node.NodeMocksPlugin; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.NetworkPlugin; @@ -153,6 +154,7 @@ import org.junit.BeforeClass; import java.io.IOException; +import java.io.StringWriter; import java.lang.annotation.Annotation; import java.lang.annotation.ElementType; import java.lang.annotation.Inherited; @@ -860,7 +862,10 @@ public void waitNoPendingTasksOnAll() throws Exception { for (Client client : clients()) { ClusterHealthResponse clusterHealth = client.admin().cluster().prepareHealth().setLocal(true).get(); assertThat("client " + client + " still has in flight fetch", clusterHealth.getNumberOfInFlightFetch(), equalTo(0)); - PendingClusterTasksResponse pendingTasks = client.admin().cluster().preparePendingClusterTasks().setLocal(true).get(); + PendingClusterTasksResponse pendingTasks = client.execute( + TransportPendingClusterTasksAction.TYPE, + new PendingClusterTasksRequest().local(true) + ).get(); assertThat( "client " + client + " still has pending tasks " + pendingTasks, pendingTasks.pendingTasks(), @@ -971,17 +976,24 @@ 
private ClusterHealthStatus ensureColor( final var allocationExplainRef = new AtomicReference(); final var clusterStateRef = new AtomicReference(); final var pendingTasksRef = new AtomicReference(); - final var hotThreadsRef = new AtomicReference(); + final var hotThreadsRef = new AtomicReference(); final var detailsFuture = new PlainActionFuture(); try (var listeners = new RefCountingListener(detailsFuture)) { clusterAdmin().prepareAllocationExplain().execute(listeners.acquire(allocationExplainRef::set)); clusterAdmin().prepareState().execute(listeners.acquire(clusterStateRef::set)); - clusterAdmin().preparePendingClusterTasks().execute(listeners.acquire(pendingTasksRef::set)); - clusterAdmin().prepareNodesHotThreads() - .setThreads(9999) - .setIgnoreIdleThreads(false) - .execute(listeners.acquire(hotThreadsRef::set)); + client().execute( + TransportPendingClusterTasksAction.TYPE, + new PendingClusterTasksRequest(), + listeners.acquire(pendingTasksRef::set) + ); + try (var writer = new StringWriter()) { + new HotThreads().busiestThreads(9999).ignoreIdleThreads(false).detect(writer); + hotThreadsRef.set(writer.toString()); + } catch (Exception e) { + logger.error("exception capturing hot threads", e); + hotThreadsRef.set("exception capturing hot threads: " + e); + } } try { @@ -996,10 +1008,7 @@ private ClusterHealthStatus ensureColor( safeFormat(allocationExplainRef.get(), r -> Strings.toString(r.getExplanation(), true, true)), safeFormat(clusterStateRef.get(), r -> r.getState().toString()), safeFormat(pendingTasksRef.get(), r -> Strings.toString(r, true, true)), - safeFormat( - hotThreadsRef.get(), - r -> r.getNodes().stream().map(NodeHotThreads::getHotThreads).collect(Collectors.joining("\n")) - ) + hotThreadsRef.get() ); fail("timed out waiting for " + color + " state"); } @@ -1039,7 +1048,7 @@ public ClusterHealthStatus waitForRelocation(ClusterHealthStatus status) { "waitForRelocation timed out (status={}), cluster state:\n{}\n{}", status, clusterAdmin().prepareState().get().getState(), - clusterAdmin().preparePendingClusterTasks().get() + getClusterPendingTasks() ); assertThat("timed out waiting for relocation", actionGet.isTimedOut(), equalTo(false)); } @@ -1049,6 +1058,18 @@ public ClusterHealthStatus waitForRelocation(ClusterHealthStatus status) { return actionGet.getStatus(); } + public static PendingClusterTasksResponse getClusterPendingTasks() { + return getClusterPendingTasks(client()); + } + + public static PendingClusterTasksResponse getClusterPendingTasks(Client client) { + try { + return client.execute(TransportPendingClusterTasksAction.TYPE, new PendingClusterTasksRequest()).get(10, TimeUnit.SECONDS); + } catch (Exception e) { + return fail(e); + } + } + /** * Waits until at least a give number of document is visible for searchers * @@ -1145,11 +1166,7 @@ public static DiscoveryNode waitAndGetHealthNode(InternalTestCluster internalClu * Prints the current cluster state as debug logging. 
*/ public void logClusterState() { - logger.debug( - "cluster state:\n{}\n{}", - clusterAdmin().prepareState().get().getState(), - clusterAdmin().preparePendingClusterTasks().get() - ); + logger.debug("cluster state:\n{}\n{}", clusterAdmin().prepareState().get().getState(), getClusterPendingTasks()); } protected void ensureClusterSizeConsistency() { diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java index 1517571878fa2..b201d58ac0e23 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java @@ -421,7 +421,7 @@ public ClusterHealthStatus ensureGreen(TimeValue timeout, String... indices) { logger.info( "ensureGreen timed out, cluster state:\n{}\n{}", clusterAdmin().prepareState().get().getState(), - clusterAdmin().preparePendingClusterTasks().get() + ESIntegTestCase.getClusterPendingTasks(client()) ); assertThat("timed out waiting for green state", actionGet.isTimedOut(), equalTo(false)); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java index 5392986c25507..3adf92e30e15d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java +++ b/test/framework/src/main/java/org/elasticsearch/test/client/RandomizingClient.java @@ -14,9 +14,11 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchType; import org.elasticsearch.client.internal.Client; +import org.elasticsearch.client.internal.ElasticsearchClient; import org.elasticsearch.client.internal.FilterClient; import org.elasticsearch.cluster.routing.Preference; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.search.builder.PointInTimeBuilder; import java.util.Arrays; import java.util.Random; @@ -59,7 +61,7 @@ public RandomizingClient(Client client, Random random) { @Override public SearchRequestBuilder prepareSearch(String... 
indices) { - SearchRequestBuilder searchRequestBuilder = in.prepareSearch(indices) + SearchRequestBuilder searchRequestBuilder = new RandomizedSearchRequestBuilder(this).setIndices(indices) .setSearchType(defaultSearchType) .setPreference(defaultPreference) .setBatchedReduceSize(batchedReduceSize); @@ -84,4 +86,18 @@ public Client in() { return super.in(); } + private class RandomizedSearchRequestBuilder extends SearchRequestBuilder { + RandomizedSearchRequestBuilder(ElasticsearchClient client) { + super(client); + } + + @Override + public SearchRequestBuilder setPointInTime(PointInTimeBuilder pointInTimeBuilder) { + if (defaultPreference != null) { + setPreference(null); + } + return super.setPointInTime(pointInTimeBuilder); + } + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 6dc1f57030140..542c5d6bdaec6 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -107,7 +107,6 @@ import java.util.Objects; import java.util.Optional; import java.util.Set; -import java.util.TreeSet; import java.util.concurrent.TimeUnit; import java.util.function.Consumer; import java.util.function.Predicate; @@ -120,6 +119,7 @@ import static java.util.Collections.sort; import static java.util.Collections.unmodifiableList; import static org.elasticsearch.client.RestClient.IGNORE_RESPONSE_CODES_PARAM; +import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.core.Strings.format; import static org.hamcrest.Matchers.anyOf; import static org.hamcrest.Matchers.containsString; @@ -215,9 +215,27 @@ public enum ProductFeature { } private static EnumSet<ProductFeature> availableFeatures; - private static Set<String> nodeVersions; + private static Set<String> nodesVersions; private static TestFeatureService testFeatureService; + protected static Set<String> getCachedNodesVersions() { + assert nodesVersions != null; + return nodesVersions; + } + + protected static Set<String> readVersionsFromNodesInfo(RestClient adminClient) throws IOException { + return getNodesInfo(adminClient).values().stream().map(nodeInfo -> nodeInfo.get("version").toString()).collect(Collectors.toSet()); + } + + protected static Map<String, Map<?, ?>> getNodesInfo(RestClient adminClient) throws IOException { + Map<?, ?> response = entityAsMap(adminClient.performRequest(new Request("GET", "_nodes/plugins"))); + Map<?, ?> nodes = (Map<?, ?>) response.get("nodes"); + + return nodes.entrySet() + .stream() + .collect(Collectors.toUnmodifiableMap(entry -> entry.getKey().toString(), entry -> (Map<?, ?>) entry.getValue())); + } + protected static boolean clusterHasFeature(String featureId) { return testFeatureService.clusterHasFeature(featureId); } @@ -232,7 +250,7 @@ public void initClient() throws IOException { assert adminClient == null; assert clusterHosts == null; assert availableFeatures == null; - assert nodeVersions == null; + assert nodesVersions == null; assert testFeatureService == null; clusterHosts = parseClusterHosts(getTestRestCluster()); logger.info("initializing REST clients against {}", clusterHosts); adminClient = buildClient(restAdminSettings(), clusterHosts.toArray(new HttpHost[clusterHosts.size()])); availableFeatures = EnumSet.of(ProductFeature.LEGACY_TEMPLATES); - nodeVersions = new TreeSet<>(); - var semanticNodeVersions = new HashSet<Version>(); + Set<String> versions = new HashSet<>(); boolean serverless = false; - Map<?, ?> response = entityAsMap(adminClient.performRequest(new Request("GET", "_nodes/plugins"))); - Map<?, ?> nodes = (Map<?, ?>) response.get("nodes"); - for (Map.Entry<?, ?> node : nodes.entrySet()) { - Map<?, ?> nodeInfo = (Map<?, ?>) node.getValue(); + + for (Map<?, ?> nodeInfo : getNodesInfo(adminClient).values()) { var nodeVersion = nodeInfo.get("version").toString(); - nodeVersions.add(nodeVersion); - parseLegacyVersion(nodeVersion).map(semanticNodeVersions::add); + versions.add(nodeVersion); for (Object module : (List<?>) nodeInfo.get("modules")) { Map<?, ?> moduleInfo = (Map<?, ?>) module; final String moduleName = moduleInfo.get("name").toString(); @@ -288,21 +302,15 @@ public void initClient() throws IOException { ); } } + nodesVersions = Collections.unmodifiableSet(versions); + var semanticNodeVersions = nodesVersions.stream() + .map(ESRestTestCase::parseLegacyVersion) + .flatMap(Optional::stream) + .collect(Collectors.toSet()); assert semanticNodeVersions.isEmpty() == false || serverless; - // Historical features information is unavailable when using legacy test plugins - boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null; - var providers = hasHistoricalFeaturesInformation - ? List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()) - : List.of(new RestTestLegacyFeatures()); - - testFeatureService = new TestFeatureService( - hasHistoricalFeaturesInformation, - providers, - semanticNodeVersions, - ClusterFeatures.calculateAllNodeFeatures(getClusterStateFeatures().values()) - ); + testFeatureService = createTestFeatureService(adminClient, semanticNodeVersions); } assert testFeatureService != null; @@ -310,7 +318,23 @@ assert adminClient != null; assert clusterHosts != null; assert availableFeatures != null; - assert nodeVersions != null; + assert nodesVersions != null; + } + + protected static TestFeatureService createTestFeatureService(RestClient adminClient, Set<Version> semanticNodeVersions) + throws IOException { + // Historical features information is unavailable when using legacy test plugins + boolean hasHistoricalFeaturesInformation = System.getProperty("tests.features.metadata.path") != null; + var providers = hasHistoricalFeaturesInformation + ? List.of(new RestTestLegacyFeatures(), new ESRestTestCaseHistoricalFeatures()) + : List.of(new RestTestLegacyFeatures()); + + return new TestFeatureService( + hasHistoricalFeaturesInformation, + providers, + semanticNodeVersions, + ClusterFeatures.calculateAllNodeFeatures(getClusterStateFeatures(adminClient).values()) + ); } protected static boolean has(ProductFeature feature) { @@ -414,7 +438,7 @@ private boolean isExclusivelyTargetingCurrentVersionCluster() { public static RequestOptions expectVersionSpecificWarnings(Consumer<VersionSensitiveWarningsHandler> expectationsSetter) { Builder builder = RequestOptions.DEFAULT.toBuilder(); - VersionSensitiveWarningsHandler warningsHandler = new VersionSensitiveWarningsHandler(new HashSet<>(nodeVersions)); + VersionSensitiveWarningsHandler warningsHandler = new VersionSensitiveWarningsHandler(getCachedNodesVersions()); expectationsSetter.accept(warningsHandler); builder.setWarningsHandler(warningsHandler); return builder.build(); @@ -483,7 +507,7 @@ public static void closeClients() throws IOException { client = null; adminClient = null; availableFeatures = null; - nodeVersions = null; + nodesVersions = null; testFeatureService = null; } } @@ -1227,7 +1251,9 @@ protected static RefreshResponse refresh(String index) throws IOException { protected static RefreshResponse refresh(RestClient client, String index) throws IOException { Request refreshRequest = new Request("POST", "/" + index + "/_refresh"); Response response = client.performRequest(refreshRequest); - return RefreshResponse.fromXContent(responseAsParser(response)); + try (var parser = responseAsParser(response)) { + return RefreshResponse.fromXContent(parser); + } } private static void waitForPendingRollupTasks() throws Exception { @@ -1684,7 +1710,9 @@ public static CreateIndexResponse createIndex(RestClient client, String name, Se entity += "}"; request.setJsonEntity(entity); Response response = client.performRequest(request); - return CreateIndexResponse.fromXContent(responseAsParser(response)); + try (var parser = responseAsParser(response)) { + return CreateIndexResponse.fromXContent(parser); + } } protected static AcknowledgedResponse deleteIndex(String name) throws IOException { @@ -1694,7 +1722,9 @@ protected static AcknowledgedResponse deleteIndex(RestClient restClient, String name) throws IOExceptio { Request request = new Request("DELETE", "/" + name); Response response = restClient.performRequest(request); - return AcknowledgedResponse.fromXContent(responseAsParser(response)); + try (var parser = responseAsParser(response)) { + return AcknowledgedResponse.fromXContent(parser); + } } protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException { @@ -1817,7 +1847,7 @@ protected static Map<String, Object> responseAsMap(Response response) throws IOE return responseEntity; } - protected static XContentParser responseAsParser(Response response) throws IOException { + public static XContentParser responseAsParser(Response response) throws IOException { return XContentHelper.createParser(XContentParserConfiguration.EMPTY, responseAsBytes(response), XContentType.JSON); } @@ -1940,10 +1970,12 @@ protected static boolean isXPackTemplate(String name) { || name.startsWith("logs-apm")) { return true; } + if (name.startsWith(".slm-history") || name.startsWith("ilm-history")) { + return true; + } switch (name) { case ".watches": case "security_audit_log": - case ".slm-history": case ".async-search": case
".profiling-ilm-lock": // TODO: Remove after switch to K/V indices case "saml-service-provider": @@ -1958,7 +1990,6 @@ protected static boolean isXPackTemplate(String name) { case "synthetics-settings": case "synthetics-mappings": case ".snapshot-blob-cache": - case "ilm-history": case "logstash-index-template": case "security-index-template": case "data-streams-mappings": @@ -2063,11 +2094,11 @@ public void ensurePeerRecoveryRetentionLeasesRenewedAndSynced(String index) thro }, 60, TimeUnit.SECONDS); } - private static Map> getClusterStateFeatures() throws IOException { + private static Map> getClusterStateFeatures(RestClient adminClient) throws IOException { final Request request = new Request("GET", "_cluster/state"); request.addParameter("filter_path", "nodes_features"); - final Response response = adminClient().performRequest(request); + final Response response = adminClient.performRequest(request); var responseData = responseAsMap(response); if (responseData.get("nodes_features") instanceof List nodesFeatures) { @@ -2149,7 +2180,7 @@ protected static TransportVersion getTransportVersionWithFallback( // In that case the transport_version field won't exist. Use version, but only for <8.8.0: after that versions diverge. var version = parseLegacyVersion(versionField); assert version.isPresent(); - if (version.get().before(Version.V_8_8_0)) { + if (version.get().before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { return TransportVersion.fromId(version.get().id); } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java index fd5bd253fd8e5..ca8f339026b6b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/RestTestLegacyFeatures.java @@ -16,6 +16,7 @@ import java.util.Map; import static java.util.Map.entry; +import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; /** * This class groups historical features that have been removed from the production codebase, but are still used by the test @@ -37,6 +38,19 @@ public class RestTestLegacyFeatures implements FeatureSpecification { public static final NodeFeature DELETE_TEMPLATE_MULTIPLE_NAMES_SUPPORTED = new NodeFeature( "indices.delete_template_multiple_names_supported" ); + public static final NodeFeature ML_NEW_MEMORY_FORMAT = new NodeFeature("ml.new_memory_format"); + + /** These are "pure test" features: normally we would not need them, and test for TransportVersion/fallback to Version (see for example + * {@code ESRestTestCase#minimumTransportVersion()}. However, some tests explicitly check and validate the content of a response, so + * we need these features to support them. 
+ */ + public static final NodeFeature TRANSPORT_VERSION_SUPPORTED = new NodeFeature("transport_version_supported"); + public static final NodeFeature STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION = new NodeFeature( + "state.transport_version_to_nodes_version" + ); + + // Ref: https://github.com/elastic/elasticsearch/pull/86416 + public static final NodeFeature ML_MEMORY_OVERHEAD_FIXED = new NodeFeature("ml.memory_overhead_fixed"); // QA - rolling upgrade tests public static final NodeFeature SECURITY_UPDATE_API_KEY = new NodeFeature("security.api_key_update"); @@ -73,6 +87,10 @@ public Map getHistoricalFeatures() { entry(ML_STATE_RESET_FALLBACK_ON_DISABLED, Version.V_8_7_0), entry(SECURITY_UPDATE_API_KEY, Version.V_8_4_0), entry(SECURITY_BULK_UPDATE_API_KEY, Version.V_8_5_0), + entry(ML_NEW_MEMORY_FORMAT, Version.V_8_11_0), + entry(TRANSPORT_VERSION_SUPPORTED, VERSION_INTRODUCING_TRANSPORT_VERSIONS), + entry(STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION, Version.V_8_11_0), + entry(ML_MEMORY_OVERHEAD_FIXED, Version.V_8_2_1), entry(WATCHES_VERSION_IN_META, Version.V_7_13_0), entry(SECURITY_ROLE_DESCRIPTORS_OPTIONAL, Version.V_7_3_0), entry(SEARCH_AGGREGATIONS_FORCE_INTERVAL_SELECTION_DATE_HISTOGRAM, Version.V_7_2_0), diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java index 1f7a48add1f1c..b6627cdd99b70 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/TestFeatureService.java @@ -19,7 +19,7 @@ import java.util.Set; import java.util.function.Predicate; -class TestFeatureService { +public class TestFeatureService { private final Predicate historicalFeaturesPredicate; private final Set clusterStateFeatures; diff --git a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java index 2c313da69b42e..49fb38b518dce 100644 --- a/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java +++ b/test/test-clusters/src/main/java/org/elasticsearch/test/cluster/FeatureFlag.java @@ -16,7 +16,6 @@ */ public enum FeatureFlag { TIME_SERIES_MODE("es.index_mode_feature_flag_registered=true", Version.fromString("8.0.0"), null), - LEARNING_TO_RANK("es.learning_to_rank_feature_flag_enabled=true", Version.fromString("8.12.0"), null), FAILURE_STORE_ENABLED("es.failure_store_feature_flag_enabled=true", Version.fromString("8.12.0"), null); public final String systemProperty; diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java index c95fc5c131df0..38d090e455ebe 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlDocsTestClient.java @@ -10,7 +10,6 @@ import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.elasticsearch.Version; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -27,7 +26,6 @@ import java.util.Map; import java.util.Objects; import java.util.function.BiPredicate; -import java.util.function.Predicate; /** * Used to execute REST requests 
according to the docs snippets that need to be tested. Wraps a @@ -40,12 +38,9 @@ public ClientYamlDocsTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List<HttpHost> hosts, - final Version esVersion, - final Predicate<String> clusterFeaturesPredicate, - final String os, final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes ) { - super(restSpec, restClient, hosts, esVersion, clusterFeaturesPredicate, os, clientBuilderWithSniffedNodes); + super(restSpec, restClient, hosts, clientBuilderWithSniffedNodes); } @Override diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java index d30f65718943e..c57a9f3107393 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestClient.java @@ -20,7 +20,6 @@ import org.apache.http.util.EntityUtils; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.elasticsearch.Version; import org.elasticsearch.client.NodeSelector; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; @@ -47,7 +46,6 @@ import java.util.Map.Entry; import java.util.Set; import java.util.function.BiPredicate; -import java.util.function.Predicate; import java.util.stream.Collectors; import static com.carrotsearch.randomizedtesting.RandomizedTest.frequently; @@ -64,44 +62,20 @@ public class ClientYamlTestClient implements Closeable { private final ClientYamlSuiteRestSpec restSpec; private final Map<NodeSelector, RestClient> restClients = new HashMap<>(); - private final Version esVersion; - private final String os; private final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes; - private final Predicate<String> clusterFeaturesPredicate; ClientYamlTestClient( final ClientYamlSuiteRestSpec restSpec, final RestClient restClient, final List<HttpHost> hosts, - final Version esVersion, - final Predicate<String> clusterFeaturesPredicate, - final String os, final CheckedSupplier<RestClientBuilder, IOException> clientBuilderWithSniffedNodes ) { - this.clusterFeaturesPredicate = clusterFeaturesPredicate; assert hosts.size() > 0; this.restSpec = restSpec; this.restClients.put(NodeSelector.ANY, restClient); - this.esVersion = esVersion; - this.os = os; this.clientBuilderWithSniffedNodes = clientBuilderWithSniffedNodes; } - /** - * @return the version of the oldest node in the cluster - */ - public Version getEsVersion() { - return esVersion; - } - - public boolean clusterHasFeature(String featureId) { - return clusterFeaturesPredicate.test(featureId); - } - - public String getOs() { - return os; - } - /** * Calls an api with the provided parameters and body */ diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java index a584280119ef3..e2dc6e810eace 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContext.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Map; import java.util.function.BiPredicate; +import java.util.function.Predicate; /** * Execution context passed across the REST tests.
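The yaml-rest-runner refactor above moves the cluster's ES version, OS string, and feature predicate off the client and onto the execution context. A minimal sketch of the resulting wiring, using only names that appear in this diff (illustrative, not part of the patch):

```java
// The client is now built without version/os/feature arguments ...
ClientYamlTestClient yamlClient = initClientYamlTestClient(restSpec, client(), hosts);

// ... and the execution context carries that state instead, so a single
// client can serve contexts that answer esVersion()/os()/clusterHasFeature()
// from different sources.
ClientYamlTestExecutionContext context = new ClientYamlTestExecutionContext(
    testCandidate,
    yamlClient,
    randomizeContentType(),            // unchanged
    esVersion,                         // moved here from ClientYamlTestClient
    ESRestTestCase::clusterHasFeature, // moved here from ClientYamlTestClient
    os                                 // moved here from ClientYamlTestClient
);
```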
@@ -50,26 +51,48 @@ public class ClientYamlTestExecutionContext { private ClientYamlTestResponse response; + private final Version esVersion; + + private final String os; + private final Predicate clusterFeaturesPredicate; + private final boolean randomizeContentType; private final BiPredicate pathPredicate; public ClientYamlTestExecutionContext( ClientYamlTestCandidate clientYamlTestCandidate, ClientYamlTestClient clientYamlTestClient, - boolean randomizeContentType + boolean randomizeContentType, + final Version esVersion, + final Predicate clusterFeaturesPredicate, + final String os ) { - this(clientYamlTestCandidate, clientYamlTestClient, randomizeContentType, (ignoreApi, ignorePath) -> true); + this( + clientYamlTestCandidate, + clientYamlTestClient, + randomizeContentType, + esVersion, + clusterFeaturesPredicate, + os, + (ignoreApi, ignorePath) -> true + ); } public ClientYamlTestExecutionContext( ClientYamlTestCandidate clientYamlTestCandidate, ClientYamlTestClient clientYamlTestClient, boolean randomizeContentType, + final Version esVersion, + final Predicate clusterFeaturesPredicate, + final String os, BiPredicate pathPredicate ) { this.clientYamlTestClient = clientYamlTestClient; this.clientYamlTestCandidate = clientYamlTestCandidate; this.randomizeContentType = randomizeContentType; + this.esVersion = esVersion; + this.clusterFeaturesPredicate = clusterFeaturesPredicate; + this.os = os; this.pathPredicate = pathPredicate; } @@ -227,11 +250,11 @@ public Stash stash() { * @return the version of the oldest node in the cluster */ public Version esVersion() { - return clientYamlTestClient.getEsVersion(); + return esVersion; } public String os() { - return clientYamlTestClient.getOs(); + return os; } public ClientYamlTestCandidate getClientYamlTestCandidate() { @@ -239,6 +262,6 @@ public ClientYamlTestCandidate getClientYamlTestCandidate() { } public boolean clusterHasFeature(String featureId) { - return clientYamlTestClient.clusterHasFeature(featureId); + return clusterFeaturesPredicate.test(featureId); } } diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 2e1631cc8c337..89be4cb38106b 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -151,9 +151,22 @@ public void initAndResetContext() throws Exception { hosts, os ); - clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts, esVersion, ESRestTestCase::clusterHasFeature, os); - restTestExecutionContext = createRestTestExecutionContext(testCandidate, clientYamlTestClient); - adminExecutionContext = new ClientYamlTestExecutionContext(testCandidate, clientYamlTestClient, false); + clientYamlTestClient = initClientYamlTestClient(restSpec, client(), hosts); + restTestExecutionContext = createRestTestExecutionContext( + testCandidate, + clientYamlTestClient, + esVersion, + ESRestTestCase::clusterHasFeature, + os + ); + adminExecutionContext = new ClientYamlTestExecutionContext( + testCandidate, + clientYamlTestClient, + false, + esVersion, + ESRestTestCase::clusterHasFeature, + os + ); final String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null); blacklistPathMatchers = new ArrayList<>(); for (final String entry : blacklist) { @@ -179,30 +192,29 @@ public void 
initAndResetContext() throws Exception { */ protected ClientYamlTestExecutionContext createRestTestExecutionContext( ClientYamlTestCandidate clientYamlTestCandidate, - ClientYamlTestClient clientYamlTestClient - ) { - return new ClientYamlTestExecutionContext(clientYamlTestCandidate, clientYamlTestClient, randomizeContentType()); - } - - protected ClientYamlTestClient initClientYamlTestClient( - final ClientYamlSuiteRestSpec restSpec, - final RestClient restClient, - final List hosts, + ClientYamlTestClient clientYamlTestClient, final Version esVersion, final Predicate clusterFeaturesPredicate, final String os ) { - return new ClientYamlTestClient( - restSpec, - restClient, - hosts, + return new ClientYamlTestExecutionContext( + clientYamlTestCandidate, + clientYamlTestClient, + randomizeContentType(), esVersion, clusterFeaturesPredicate, - os, - this::getClientBuilderWithSniffedHosts + os ); } + protected ClientYamlTestClient initClientYamlTestClient( + final ClientYamlSuiteRestSpec restSpec, + final RestClient restClient, + final List hosts + ) { + return new ClientYamlTestClient(restSpec, restClient, hosts, this::getClientBuilderWithSniffedHosts); + } + @AfterClass public static void closeClient() throws IOException { try { @@ -419,7 +431,7 @@ Tuple readVersionsFromCatNodes(RestClient restClient) throws I return new Tuple<>(version, masterVersion); } - String readOsFromNodesInfo(RestClient restClient) throws IOException { + static String readOsFromNodesInfo(RestClient restClient) throws IOException { final Request request = new Request("GET", "/_nodes/os"); Response response = restClient.performRequest(request); ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); diff --git a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java index bd038cc4dcd58..08631c148a7ed 100644 --- a/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java +++ b/test/yaml-rest-runner/src/main/java/org/elasticsearch/test/rest/yaml/section/DoSection.java @@ -188,10 +188,16 @@ public static DoSection parse(XContentParser parser) throws IOException { } else if (token.isValue()) { if ("body".equals(paramName)) { String body = parser.text(); - XContentParser bodyParser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, body); - // multiple bodies are supported e.g. in case of bulk provided as a whole string - while (bodyParser.nextToken() != null) { - apiCallSection.addBody(bodyParser.mapOrdered()); + try ( + XContentParser bodyParser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + body + ) + ) { + // multiple bodies are supported e.g. 
in case of bulk provided as a whole string + while (bodyParser.nextToken() != null) { + apiCallSection.addBody(bodyParser.mapOrdered()); + } } } else { apiCallSection.addParam(paramName, parser.text()); diff --git a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java index 49cb509608ec1..1c3515cf02f98 100644 --- a/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java +++ b/test/yaml-rest-runner/src/test/java/org/elasticsearch/test/rest/yaml/ClientYamlTestExecutionContextTests.java @@ -27,7 +27,14 @@ public class ClientYamlTestExecutionContextTests extends ESTestCase { public void testHeadersSupportStashedValueReplacement() throws IOException { final AtomicReference> headersRef = new AtomicReference<>(); final Version version = VersionUtils.randomVersion(random()); - final ClientYamlTestExecutionContext context = new ClientYamlTestExecutionContext(null, null, randomBoolean()) { + final ClientYamlTestExecutionContext context = new ClientYamlTestExecutionContext( + null, + null, + randomBoolean(), + version, + feature -> true, + "os" + ) { @Override ClientYamlTestResponse callApiInternal( String apiName, @@ -39,11 +46,6 @@ ClientYamlTestResponse callApiInternal( headersRef.set(headers); return null; } - - @Override - public Version esVersion() { - return version; - } }; final Map headers = new HashMap<>(); headers.put("foo", "$bar"); @@ -63,7 +65,14 @@ public Version esVersion() { public void testStashHeadersOnException() throws IOException { final Version version = VersionUtils.randomVersion(random()); - final ClientYamlTestExecutionContext context = new ClientYamlTestExecutionContext(null, null, randomBoolean()) { + final ClientYamlTestExecutionContext context = new ClientYamlTestExecutionContext( + null, + null, + randomBoolean(), + version, + feature -> true, + "os" + ) { @Override ClientYamlTestResponse callApiInternal( String apiName, @@ -74,11 +83,6 @@ ClientYamlTestResponse callApiInternal( ) { throw new RuntimeException("boom!"); } - - @Override - public Version esVersion() { - return version; - } }; final Map headers = new HashMap<>(); headers.put("Accept", "application/json"); diff --git a/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java index 6f13b3b4bc528..4c8666365f603 100644 --- a/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java +++ b/x-pack/plugin/analytics/src/internalClusterTest/java/org/elasticsearch/xpack/analytics/multiterms/MultiTermsWithRequestBreakerIT.java @@ -61,7 +61,7 @@ public void testRequestBreaker() throws Exception { new MultiValuesSourceFieldConfig.Builder().setFieldName("field1.keyword").build() ) ) - ).get(); + ).get().decRef(); } catch (ElasticsearchException e) { if (ExceptionsHelper.unwrap(e, CircuitBreakingException.class) == null) { throw e; diff --git a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java index 44621ee211838..f528d99133756 100644 --- 
a/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java +++ b/x-pack/plugin/apm-data/src/main/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistry.java @@ -134,7 +134,9 @@ protected List getIngestPipelines() { private static ComponentTemplate loadComponentTemplate(String name, int version) { try { final byte[] content = loadVersionedResourceUTF8("/component-templates/" + name + ".yaml", version); - return ComponentTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); + try (var parser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)) { + return ComponentTemplate.parse(parser); + } } catch (Exception e) { throw new RuntimeException("failed to load APM Ingest plugin's component template: " + name, e); } @@ -143,7 +145,9 @@ private static ComponentTemplate loadComponentTemplate(String name, int version) private static ComposableIndexTemplate loadIndexTemplate(String name, int version) { try { final byte[] content = loadVersionedResourceUTF8("/index-templates/" + name + ".yaml", version); - return ComposableIndexTemplate.parse(YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)); + try (var parser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)) { + return ComposableIndexTemplate.parse(parser); + } } catch (Exception e) { throw new RuntimeException("failed to load APM Ingest plugin's index template: " + name, e); } diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm.error@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm.error@mappings.yaml index c946403c795dd..e6353853bc4d5 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm.error@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/logs-apm.error@mappings.yaml @@ -6,21 +6,29 @@ _meta: template: mappings: properties: + # error.* error.custom: type: flattened error.exception.attributes: type: flattened error.exception.stacktrace: type: flattened + error.log.stacktrace: + type: flattened error.grouping_name: type: keyword script: | def logMessage = params['_source'].error?.log?.message; - if (logMessage != null) { + if (logMessage != null && logMessage != "") { emit(logMessage); return; } def exception = params['_source'].error?.exception; - if (exception != null && exception.length > 0) { + def exceptionMessage = exception != null && exception.length > 0 ? exception[0]?.message : null; + if (exceptionMessage != null && exceptionMessage != "") { emit(exception[0].message); } + + # http.* + http.request.body: + type: flattened diff --git a/x-pack/plugin/apm-data/src/main/resources/component-templates/traces-apm@mappings.yaml b/x-pack/plugin/apm-data/src/main/resources/component-templates/traces-apm@mappings.yaml index 558a5da81e4f7..eb2da017d97b7 100644 --- a/x-pack/plugin/apm-data/src/main/resources/component-templates/traces-apm@mappings.yaml +++ b/x-pack/plugin/apm-data/src/main/resources/component-templates/traces-apm@mappings.yaml @@ -6,15 +6,22 @@ _meta: template: mappings: properties: + # NOTE(axw) processor.event may be either "span" or "transaction". + # + # This field should eventually be removed, and we should end up + # with only spans. Some of those spans may be identified as local + # roots, equivalent in concept to transactions. 
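Looking back at the APMIndexTemplateRegistry hunks above: both loaders now release their YAML parser via try-with-resources. The shared shape could be factored as sketched below; `parseTemplate` is a hypothetical helper, the other names come from the diff:

```java
// Sketch only: load a versioned classpath resource and parse it with a
// parser that is reliably closed, mirroring loadComponentTemplate and
// loadIndexTemplate above.
private static <T> T parseTemplate(String path, int version, CheckedFunction<XContentParser, T, IOException> parseFn) {
    try {
        final byte[] content = loadVersionedResourceUTF8(path, version);
        try (XContentParser parser = YamlXContent.yamlXContent.createParser(XContentParserConfiguration.EMPTY, content)) {
            return parseFn.apply(parser);
        }
    } catch (Exception e) {
        throw new RuntimeException("failed to load APM template resource: " + path, e);
    }
}

// e.g. parseTemplate("/index-templates/" + name + ".yaml", version, ComposableIndexTemplate::parse)
```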
processor.event: type: keyword + + # event.* event.success_count: type: byte index: false - span.duration.us: - type: long - transaction.duration.us: - type: long + + # http.* + http.request.body: + type: flattened http.response.transfer_size: type: long index: false @@ -24,10 +31,22 @@ template: http.response.decoded_body_size: type: long index: false + + # span.* + span.duration.us: + type: long span.representative_count: type: scaled_float scaling_factor: 1000 index: false + span.stacktrace: + type: flattened + + # transaction.* + transaction.custom: + type: flattened + transaction.duration.us: + type: long transaction.representative_count: type: scaled_float scaling_factor: 1000 diff --git a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java index eb9d440106dea..4ab7396fb1a9e 100644 --- a/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java +++ b/x-pack/plugin/apm-data/src/test/java/org/elasticsearch/xpack/apmdata/APMIndexTemplateRegistryTests.java @@ -51,13 +51,14 @@ import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.Set; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import static org.elasticsearch.xpack.core.XPackSettings.APM_DATA_ENABLED; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; @@ -137,30 +138,30 @@ public void testThatIndependentTemplatesAreAddedImmediatelyIfMissing() throws Ex assertThat(actualInstalledIndexTemplates.get(), equalTo(0)); } - public void testIngestPipelines() { + public void testIngestPipelines() throws Exception { DiscoveryNode node = DiscoveryNodeUtils.create("node"); DiscoveryNodes nodes = DiscoveryNodes.builder().localNodeId("node").masterNodeId("node").add(node).build(); final List pipelineConfigs = apmIndexTemplateRegistry.getIngestPipelines(); assertThat(pipelineConfigs, is(not(empty()))); - pipelineConfigs.forEach(ingestPipelineConfig -> { - AtomicInteger putPipelineRequestsLocal = new AtomicInteger(0); - client.setVerifier((a, r, l) -> { - if (r instanceof PutPipelineRequest && ingestPipelineConfig.getId().equals(((PutPipelineRequest) r).getId())) { - putPipelineRequestsLocal.incrementAndGet(); + final Set expectedPipelines = apmIndexTemplateRegistry.getIngestPipelines() + .stream() + .map(IngestPipelineConfig::getId) + .collect(Collectors.toSet()); + final Set installedPipelines = ConcurrentHashMap.newKeySet(pipelineConfigs.size()); + client.setVerifier((a, r, l) -> { + if (r instanceof PutPipelineRequest putPipelineRequest) { + if (expectedPipelines.contains(putPipelineRequest.getId())) { + installedPipelines.add(putPipelineRequest.getId()); } - return AcknowledgedResponse.TRUE; - }); - - apmIndexTemplateRegistry.clusterChanged( - createClusterChangedEvent(Map.of(), Map.of(), ingestPipelineConfig.getPipelineDependencies(), nodes) - ); - try { - assertBusy(() -> assertThat(putPipelineRequestsLocal.get(), greaterThanOrEqualTo(1))); - } catch (Exception e) { - throw new RuntimeException(e); } + return AcknowledgedResponse.TRUE; + }); + + assertBusy(() -> { + 
apmIndexTemplateRegistry.clusterChanged(createClusterChangedEvent(Map.of(), Map.of(), List.copyOf(installedPipelines), nodes)); + assertThat(installedPipelines, equalTo(expectedPipelines)); }); } @@ -309,7 +310,7 @@ private ClusterChangedEvent createClusterChangedEvent( private ClusterChangedEvent createClusterChangedEvent( Map existingComponentTemplates, Map existingComposableTemplates, - List ingestPipelines, + List existingIngestPipelines, Map existingPolicies, DiscoveryNodes nodes ) { @@ -317,7 +318,7 @@ private ClusterChangedEvent createClusterChangedEvent( Settings.EMPTY, existingComponentTemplates, existingComposableTemplates, - ingestPipelines, + existingIngestPipelines, existingPolicies, nodes ); diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml index b8fdebf9a938b..f4397ca18c101 100644 --- a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/10_apm.yml @@ -82,110 +82,3 @@ setup: - length: {hits.hits: 1} - match: {hits.hits.0.fields.event\.success_count: [1]} - match: {hits.hits.0.fields.span\.duration\.us: [123]} - ---- -"Test metrics-apm.internal-* data stream rerouting": - - do: - bulk: - index: metrics-apm.internal-testing - refresh: true - body: - - create: {} - - "@timestamp": "2017-06-22" - data_stream.type: metrics - data_stream.dataset: apm.internal - data_stream.namespace: testing - metricset: - name: transaction - - create: {} - - "@timestamp": "2017-06-22" - data_stream.type: metrics - data_stream.dataset: apm.internal - data_stream.namespace: testing - metricset: - name: service_destination - - create: {} - - "@timestamp": "2017-06-22" - data_stream.type: metrics - data_stream.dataset: apm.internal - data_stream.namespace: testing - metricset: - name: app_config # should not be rerouted - - do: - indices.get_data_stream: - name: metrics-apm.transaction.1m-testing - - do: - indices.get_data_stream: - name: metrics-apm.service_destination.1m-testing - - do: - indices.get_data_stream: - name: metrics-apm.internal-testing - - do: - search: - index: metrics-apm* - - length: {hits.hits: 3} - - match: {hits.hits.0._source.data_stream.dataset: "apm.internal"} - - match: {hits.hits.1._source.data_stream.dataset: "apm.service_destination.1m"} - - match: {hits.hits.1._source.metricset.interval: "1m"} - - match: {hits.hits.2._source.data_stream.dataset: "apm.transaction.1m"} - - match: {hits.hits.2._source.metricset.interval: "1m"} - ---- -"Test metrics-apm.app-* dynamic mapping": - - do: - bulk: - index: metrics-apm.app.svc1-testing - refresh: true - body: - - create: {} - - "@timestamp": "2017-06-22" - data_stream.type: metrics - data_stream.dataset: apm.app.svc1 - data_stream.namespace: testing - metricset: - name: app - samples: - - name: double_metric - type: gauge - value: 123 - - name: summary_metric - type: summary - value_count: 123 - sum: 456.789 - - name: histogram_metric - type: histogram - counts: [1, 2, 3] - values: [1.5, 2.5, 3.5] - - set: - items.0.create._index: index - - do: - # Wait for cluster state changes to be applied before - # querying field mappings. 
- cluster.health: - wait_for_events: languid - - do: - indices.get_field_mapping: - index: metrics-apm.app.svc1-testing - fields: [double_metric, summary_metric, histogram_metric] - - match: - $body: - $index: - mappings: - double_metric: - full_name: double_metric - mapping: - double_metric: - type: double - index: false - summary_metric: - full_name: summary_metric - mapping: - summary_metric: - type: aggregate_metric_double - metrics : [sum, value_count] - default_metric: value_count - histogram_metric: - full_name: histogram_metric - mapping: - histogram_metric: - type: histogram diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_error_grouping.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_error_grouping.yml new file mode 100644 index 0000000000000..f7cd386227fe8 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_error_grouping.yml @@ -0,0 +1,56 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + +--- +"Test logs-apm.error-* error grouping": + - do: + bulk: + index: logs-apm.error-testing + refresh: true + body: + # No error object field + - create: {} + - '{"@timestamp": "2017-06-22"}' + + # Empty error object + - create: {} + - '{"@timestamp": "2017-06-22", "error": {}}' + + # Empty error.log object + - create: {} + - '{"@timestamp": "2017-06-22", "error": {"log": {}}}' + + # Empty error.exception array + - create: {} + - '{"@timestamp": "2017-06-22", "error": {"exception": []}}' + + # Empty error.exception object + - create: {} + - '{"@timestamp": "2017-06-22", "error": {"exception": [{}]}}' + + # Non-empty error.log.message used + - create: {} + - '{"@timestamp": "2017-06-22", "error": {"log": {"message": "log_used"}, "exception": [{"message": "ignored"}]}}' + + # Non-empty error.exception.message used + - create: {} + - '{"@timestamp": "2017-06-22", "error": {"log": {"message": ""}, "exception": [{"message": "exception_used"}]}}' + + - is_false: errors + + - do: + search: + index: logs-apm.error-testing + body: + fields: ["error.grouping_name"] + - length: { hits.hits: 7 } + - match: { hits.hits.0.fields: null } + - match: { hits.hits.1.fields: null } + - match: { hits.hits.2.fields: null } + - match: { hits.hits.3.fields: null } + - match: { hits.hits.4.fields: null } + - match: { hits.hits.5.fields: {"error.grouping_name": ["log_used"]} } + - match: { hits.hits.6.fields: {"error.grouping_name": ["exception_used"]} } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_flattened_fields.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_flattened_fields.yml new file mode 100644 index 0000000000000..adb248b23fe5b --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_flattened_fields.yml @@ -0,0 +1,107 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + +--- +"Test traces-apm-* flattened fields": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # http.request.body should be mapped as flattened, allowing + # differing types to be used in http.request.body.original. + - create: {} + - '{"@timestamp": "2017-06-22", "http.request.body": {"original": "text"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "http.request.body": {"original": {"field": "value"}}}' + + # span.stacktrace is a complex object whose structure may + # change over time, and which is always treated as an object. 
+ # Moreover, stacktraces may contain dynamic "vars" whose + # types may change from one document to the next. + - create: {} + - '{"@timestamp": "2017-06-22", "span.stacktrace": [{"vars": {"a": 123}}]}' + - create: {} + - '{"@timestamp": "2017-06-22", "span.stacktrace": [{"vars": {"a": "b"}}]}' + + # transaction.custom is a complex object of fields with + # arbitrary field types that may change from one document + # to the next. + - create: {} + - '{"@timestamp": "2017-06-22", "transaction.custom": {"a": {"b": 123}}}' + - create: {} + - '{"@timestamp": "2017-06-22", "transaction.custom": {"a": "b"}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["http.request.body", "span.stacktrace", "transaction.custom"] + - length: { hits.hits: 6 } + - match: { hits.hits.0.fields: {"http.request.body": [{"original": "text"}]} } + - match: { hits.hits.1.fields: {"http.request.body": [{"original": {"field": "value"}}]} } + - match: { hits.hits.2.fields: {"span.stacktrace": [{"vars": {"a": 123}}]} } + - match: { hits.hits.3.fields: {"span.stacktrace": [{"vars": {"a": "b"}}]} } + - match: { hits.hits.4.fields: {"transaction.custom": [{"a": {"b": 123}}]} } + - match: { hits.hits.5.fields: {"transaction.custom": [{"a": "b"}]} } + +--- +"Test logs-apm.error-* flattened fields": + - do: + bulk: + index: logs-apm.error-testing + refresh: true + body: + # http.request.body has the same requirements as http.request.body + # in traces-apm-* data streams. + - create: {} + - '{"@timestamp": "2017-06-22", "http.request.body": {"original": "text"}}' + - create: {} + - '{"@timestamp": "2017-06-22", "http.request.body": {"original": {"field": "value"}}}' + + # error.{exception,log}.stacktrace have the same requirements as span.stacktrace. + - create: {} + - '{"@timestamp": "2017-06-22", "error.exception.stacktrace": [{"vars": {"a": 123}}]}' + - create: {} + - '{"@timestamp": "2017-06-22", "error.exception.stacktrace": [{"vars": {"a": "b"}}]}' + - create: {} + - '{"@timestamp": "2017-06-22", "error.log.stacktrace": [{"vars": {"a": 123}}]}' + - create: {} + - '{"@timestamp": "2017-06-22", "error.log.stacktrace": [{"vars": {"a": "b"}}]}' + + # error.exception.attributes is a complex object with arbitrary field types + # that may change from one document to the next. + - create: {} + - '{"@timestamp": "2017-06-22", "error.exception": [{"attributes": {"a": 123}}]}' + - create: {} + - '{"@timestamp": "2017-06-22", "error.exception": [{"attributes": {"a": "b"}}]}' + + # error.custom has the same requirements as transaction.custom. 
+ - create: {} + - '{"@timestamp": "2017-06-22", "error.custom": {"a": {"b": 123}}}' + - create: {} + - '{"@timestamp": "2017-06-22", "error.custom": {"a": "b"}}' + + - is_false: errors + + - do: + search: + index: logs-apm.error-testing + body: + fields: ["http.request.body", "error.log.*", "error.exception.*", "error.custom"] + - length: { hits.hits: 10 } + - match: { hits.hits.0.fields: {"http.request.body": [{"original": "text"}]} } + - match: { hits.hits.1.fields: {"http.request.body": [{"original": {"field": "value"}}]} } + - match: { hits.hits.2.fields: {"error.exception.stacktrace": [{"vars": {"a": 123}}]} } + - match: { hits.hits.3.fields: {"error.exception.stacktrace": [{"vars": {"a": "b"}}]} } + - match: { hits.hits.4.fields: {"error.log.stacktrace": [{"vars": {"a": 123}}]} } + - match: { hits.hits.5.fields: {"error.log.stacktrace": [{"vars": {"a": "b"}}]} } + - match: { hits.hits.6.fields: {"error.exception.attributes": [{"a": 123}]} } + - match: { hits.hits.7.fields: {"error.exception.attributes": [{"a": "b"}]} } + - match: { hits.hits.8.fields: {"error.custom": [{"a": {"b": 123}}]} } + - match: { hits.hits.9.fields: {"error.custom": [{"a": "b"}]} } diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_mapping.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_mapping.yml new file mode 100644 index 0000000000000..85858a9c5ed2e --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_mapping.yml @@ -0,0 +1,65 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + +--- +"Test metrics-apm.app-* dynamic mapping": + - do: + bulk: + index: metrics-apm.app.svc1-testing + refresh: true + body: + - create: {} + - "@timestamp": "2017-06-22" + data_stream.type: metrics + data_stream.dataset: apm.app.svc1 + data_stream.namespace: testing + metricset: + name: app + samples: + - name: double_metric + type: gauge + value: 123 + - name: summary_metric + type: summary + value_count: 123 + sum: 456.789 + - name: histogram_metric + type: histogram + counts: [1, 2, 3] + values: [1.5, 2.5, 3.5] + - set: + items.0.create._index: index + - do: + # Wait for cluster state changes to be applied before + # querying field mappings. 
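The assertions in this file verify the dynamic templates for APM application metrics. For reference, a hedged sketch of the same check driven from Java (index and field names from this test; `entityAsMap` is the ESRestTestCase helper):

```java
// Fetch the mapping that the dynamic templates should have produced for the
// summary sample and inspect the aggregate_metric_double parameters.
Request req = new Request("GET", "/metrics-apm.app.svc1-testing/_mapping/field/summary_metric");
Map<String, Object> body = entityAsMap(client().performRequest(req));
// Expected, per the YAML assertion below:
//   summary_metric -> mapping.summary_metric.type == "aggregate_metric_double"
//   metrics == [sum, value_count], default_metric == "value_count"
```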
+ cluster.health: + wait_for_events: languid + - do: + indices.get_field_mapping: + index: metrics-apm.app.svc1-testing + fields: [double_metric, summary_metric, histogram_metric] + - match: + $body: + $index: + mappings: + double_metric: + full_name: double_metric + mapping: + double_metric: + type: double + index: false + summary_metric: + full_name: summary_metric + mapping: + summary_metric: + type: aggregate_metric_double + metrics : [sum, value_count] + default_metric: value_count + histogram_metric: + full_name: histogram_metric + mapping: + histogram_metric: + type: histogram diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_rerouting.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_rerouting.yml new file mode 100644 index 0000000000000..f5f2307570563 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_metrics_rerouting.yml @@ -0,0 +1,52 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + +--- +"Test metrics-apm.internal-* data stream rerouting": + - do: + bulk: + index: metrics-apm.internal-testing + refresh: true + body: + - create: {} + - "@timestamp": "2017-06-22" + data_stream.type: metrics + data_stream.dataset: apm.internal + data_stream.namespace: testing + metricset: + name: transaction + - create: {} + - "@timestamp": "2017-06-22" + data_stream.type: metrics + data_stream.dataset: apm.internal + data_stream.namespace: testing + metricset: + name: service_destination + - create: {} + - "@timestamp": "2017-06-22" + data_stream.type: metrics + data_stream.dataset: apm.internal + data_stream.namespace: testing + metricset: + name: app_config # should not be rerouted + - do: + indices.get_data_stream: + name: metrics-apm.transaction.1m-testing + - do: + indices.get_data_stream: + name: metrics-apm.service_destination.1m-testing + - do: + indices.get_data_stream: + name: metrics-apm.internal-testing + - do: + search: + index: metrics-apm* + - length: {hits.hits: 3} + - match: {hits.hits.0._source.data_stream.dataset: "apm.internal"} + - match: {hits.hits.1._source.data_stream.dataset: "apm.service_destination.1m"} + - match: {hits.hits.1._source.metricset.interval: "1m"} + - match: {hits.hits.2._source.data_stream.dataset: "apm.transaction.1m"} + - match: {hits.hits.2._source.metricset.interval: "1m"} diff --git a/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_ingest.yml b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_ingest.yml new file mode 100644 index 0000000000000..ea7f948abf0b8 --- /dev/null +++ b/x-pack/plugin/apm-data/src/yamlRestTest/resources/rest-api-spec/test/20_traces_ingest.yml @@ -0,0 +1,99 @@ +--- +setup: + - do: + cluster.health: + wait_for_events: languid + +--- +"Test traces-apm-* processor.event inference": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # `processor.event: transaction` is inferred from presence of `transaction.type` + - create: {} + - '{"@timestamp": "2017-06-22", "transaction": {"type": "foo"}}' + + # `processor.event: span` is inferred otherwise + - create: {} + - '{"@timestamp": "2017-06-22"}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["processor.event"] + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields: {"processor.event": ["transaction"]} } + - match: { hits.hits.1.fields: {"processor.event": ["span"]} } + +--- +"Test traces-apm-* setting *.duration.us from 
event.duration": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + - create: {} + - '{"@timestamp": "2017-06-22", "transaction": {"type": "foo"}, "event": {"duration": 1234}}' + + - create: {} + - '{"@timestamp": "2017-06-22", "event": {"duration": 1234}}' + + # If event.duration is omitted, it is assumed to be zero. + - create: {} + - '{"@timestamp": "2017-06-22"}' + + # An existing field will not be overwritten. + - create: {} + - '{"@timestamp": "2017-06-22", "span": {"duration": {"us": 789}}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["event.duration", "*.duration.us"] + - length: { hits.hits: 4 } + - match: { hits.hits.0.fields: {"transaction.duration.us": [1]} } + - match: { hits.hits.1.fields: {"span.duration.us": [1]} } + - match: { hits.hits.2.fields: {"span.duration.us": [0]} } + - match: { hits.hits.3.fields: {"span.duration.us": [789]} } + +--- +"Test traces-apm-* setting event.success_count from event.outcome": + - do: + bulk: + index: traces-apm-testing + refresh: true + body: + # No event.outcome, no event.success_count + - create: {} + - '{"@timestamp": "2017-06-22"}' + + # event.outcome: unknown, no event.success_count + - create: {} + - '{"@timestamp": "2017-06-22", "event": {"outcome": "unknown"}}' + + - create: {} + - '{"@timestamp": "2017-06-22", "event": {"outcome": "success"}}' + + - create: {} + - '{"@timestamp": "2017-06-22", "event": {"outcome": "failure"}}' + + - is_false: errors + + - do: + search: + index: traces-apm-testing + body: + fields: ["event.success_count"] + - length: { hits.hits: 4 } + - match: { hits.hits.0.fields: null } + - match: { hits.hits.1.fields: null } + - match: { hits.hits.2.fields: {"event.success_count": [1]} } + - match: { hits.hits.3.fields: {"event.success_count": [0]} } diff --git a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java index fb3696a79a579..21a2c2295c809 100644 --- a/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java +++ b/x-pack/plugin/async-search/qa/security/src/javaRestTest/java/org/elasticsearch/xpack/search/AsyncSearchSecurityIT.java @@ -229,7 +229,7 @@ public void testRejectPointInTimeWithIndices() throws Exception { try { final Request request = new Request("POST", "/_async_search"); setRunAsHeader(request, authorizedUser); - request.addParameter("wait_for_completion_timeout", "true"); + request.addParameter("wait_for_completion_timeout", "1s"); request.addParameter("keep_on_completion", "true"); if (randomBoolean()) { request.addParameter("index", "index-" + authorizedUser); diff --git a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java index 3f888685f33db..25ff78f5c0ed2 100644 --- a/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java +++ b/x-pack/plugin/async-search/src/internalClusterTest/java/org/elasticsearch/xpack/search/AsyncSearchIntegTestCase.java @@ -193,7 +193,11 @@ protected void ensureTaskNotRunning(String id) throws Exception { assertBusy(() -> { try { AsyncSearchResponse resp = getAsyncSearch(id); - assertFalse(resp.isRunning()); + 
try { + assertFalse(resp.isRunning()); + } finally { + resp.decRef(); + } } catch (Exception exc) { if (ExceptionsHelper.unwrapCause(exc.getCause()) instanceof ResourceNotFoundException == false) { throw exc; diff --git a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java index dc6c780c64644..ed2f4a78e259c 100644 --- a/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java +++ b/x-pack/plugin/async-search/src/main/java/org/elasticsearch/xpack/search/MutableSearchResponse.java @@ -18,7 +18,6 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.core.search.action.AsyncSearchResponse; import org.elasticsearch.xpack.core.search.action.AsyncStatusResponse; @@ -156,18 +155,15 @@ void addQueryFailure(int shardIndex, ShardSearchFailure shardSearchFailure) { } private SearchResponse buildResponse(long taskStartTimeNanos, InternalAggregations reducedAggs) { - InternalSearchResponse internal = new InternalSearchResponse( + long tookInMillis = TimeValue.timeValueNanos(System.nanoTime() - taskStartTimeNanos).getMillis(); + return new SearchResponse( new SearchHits(SearchHits.EMPTY, totalHits, Float.NaN), reducedAggs, null, - null, false, false, - reducePhase - ); - long tookInMillis = TimeValue.timeValueNanos(System.nanoTime() - taskStartTimeNanos).getMillis(); - return new SearchResponse( - internal, + null, + reducePhase, null, totalShards, successfulShards, diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java index dae7d79913690..f3d6f352db186 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchResponseTests.java @@ -11,7 +11,6 @@ import org.elasticsearch.TransportVersion; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.common.Strings; @@ -25,8 +24,8 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xcontent.ToXContent; @@ -129,15 +128,13 @@ static SearchResponse randomSearchResponse(boolean ccs) { int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, successfulShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; SearchResponse.Clusters clusters; if (ccs) { clusters = createCCSClusterObjects(20, 19, true, 10, 1, 2); } 
else { clusters = SearchResponse.Clusters.EMPTY; } - return new SearchResponse( - internalSearchResponse, + return SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -211,9 +208,14 @@ public void testToXContentWithSearchResponseAfterCompletion() throws IOException long expectedCompletionTime = startTimeMillis + took; SearchHits hits = SearchHits.EMPTY_WITHOUT_TOTAL_HITS; - SearchResponseSections sections = new SearchResponseSections(hits, null, null, false, null, null, 2); SearchResponse searchResponse = new SearchResponse( - sections, + hits, + null, + null, + false, + null, + null, + 2, null, 10, 9, @@ -316,11 +318,25 @@ public void testToXContentWithCCSSearchResponseWhileRunning() throws IOException long took = 22968L; SearchHits hits = SearchHits.EMPTY_WITHOUT_TOTAL_HITS; - SearchResponseSections sections = new SearchResponseSections(hits, null, null, false, null, null, 2); SearchResponse.Clusters clusters = createCCSClusterObjects(3, 3, true); - SearchResponse searchResponse = new SearchResponse(sections, null, 10, 9, 1, took, ShardSearchFailure.EMPTY_ARRAY, clusters); + SearchResponse searchResponse = new SearchResponse( + hits, + null, + null, + false, + null, + null, + 2, + null, + 10, + 9, + 1, + took, + ShardSearchFailure.EMPTY_ARRAY, + clusters + ); AsyncSearchResponse asyncSearchResponse = new AsyncSearchResponse( "id", @@ -462,7 +478,6 @@ public void testToXContentWithCCSSearchResponseAfterCompletion() throws IOExcept long expectedCompletionTime = startTimeMillis + took; SearchHits hits = SearchHits.EMPTY_WITHOUT_TOTAL_HITS; - SearchResponseSections sections = new SearchResponseSections(hits, null, null, true, null, null, 2); SearchResponse.Clusters clusters = createCCSClusterObjects(4, 3, true); SearchResponse.Cluster updated = clusters.swapCluster( @@ -532,7 +547,22 @@ public void testToXContentWithCCSSearchResponseAfterCompletion() throws IOExcept ); assertNotNull("Set cluster failed for cluster " + cluster2.getClusterAlias(), updated); - SearchResponse searchResponse = new SearchResponse(sections, null, 10, 9, 1, took, new ShardSearchFailure[0], clusters); + SearchResponse searchResponse = new SearchResponse( + hits, + null, + null, + true, + null, + null, + 2, + null, + 10, + 9, + 1, + took, + new ShardSearchFailure[0], + clusters + ); AsyncSearchResponse asyncSearchResponse = new AsyncSearchResponse( "id", @@ -659,9 +689,14 @@ public void testToXContentWithSearchResponseWhileRunning() throws IOException { long took = 22968L; SearchHits hits = SearchHits.EMPTY_WITHOUT_TOTAL_HITS; - SearchResponseSections sections = new SearchResponseSections(hits, null, null, false, null, null, 2); SearchResponse searchResponse = new SearchResponse( - sections, + hits, + null, + null, + false, + null, + null, + 2, null, 10, 9, diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java index 9dccdf39128ea..f119e590cc75c 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncSearchTaskTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.aggregations.InternalAggregations; import org.elasticsearch.search.aggregations.bucket.terms.StringTerms; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; 
import org.elasticsearch.tasks.TaskId; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.client.NoOpClient; @@ -405,17 +404,14 @@ private static SearchResponse newSearchResponse( int skippedShards, ShardSearchFailure... failures ) { - InternalSearchResponse response = new InternalSearchResponse( + return new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, InternalAggregations.EMPTY, null, - null, false, null, - 1 - ); - return new SearchResponse( - response, + null, + 1, null, totalShards, successfulShards, diff --git a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java index 5aab26b3eba58..653ae8cafc531 100644 --- a/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java +++ b/x-pack/plugin/async-search/src/test/java/org/elasticsearch/xpack/search/AsyncStatusResponseTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -316,10 +316,8 @@ public void testGetStatusFromStoredSearchFailedShardsScenario() { int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, successfulShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; SearchResponse.Clusters clusters = new SearchResponse.Clusters(100, 99, 1); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -343,10 +341,8 @@ public void testGetStatusFromStoredSearchWithEmptyClustersSuccessfullyCompleted( int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, successfulShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ -370,7 +366,6 @@ public void testGetStatusFromStoredSearchWithNonEmptyClustersSuccessfullyComplet int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, successfulShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; int totalClusters; int successfulClusters; @@ -390,8 +385,7 @@ public void testGetStatusFromStoredSearchWithNonEmptyClustersSuccessfullyComplet skippedClusters = totalClusters - (successfulClusters + partial); clusters = AsyncSearchResponseTests.createCCSClusterObjects(80, 80, true, successfulClusters, skippedClusters, partial); } - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, @@ 
-421,7 +415,6 @@ public void testGetStatusFromStoredSearchWithNonEmptyClustersStillRunning() { int totalShards = randomIntBetween(1, Integer.MAX_VALUE); int successfulShards = randomIntBetween(0, totalShards); int skippedShards = randomIntBetween(0, successfulShards); - InternalSearchResponse internalSearchResponse = InternalSearchResponse.EMPTY_WITH_TOTAL_HITS; int successful = randomInt(10); int partial = randomInt(10); int skipped = randomInt(10); @@ -437,8 +430,7 @@ public void testGetStatusFromStoredSearchWithNonEmptyClustersStillRunning() { } SearchResponse.Clusters clusters = AsyncSearchResponseTests.createCCSClusterObjects(100, 99, true, successful, skipped, partial); - SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( null, totalShards, successfulShards, diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderIT.java index 13056ed2e4d5e..13e7d3aca1501 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderIT.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/FrozenStorageDeciderIT.java @@ -31,8 +31,8 @@ public void testScale() throws Exception { capacity().results().get("frozen").requiredCapacity().total().storage(), equalTo( ByteSizeValue.ofBytes( - (long) (statsResponse.getPrimaries().store.totalDataSetSize().getBytes() - * FrozenStorageDeciderService.DEFAULT_PERCENTAGE) / 100 + (long) (statsResponse.getPrimaries().store.totalDataSetSizeInBytes() * FrozenStorageDeciderService.DEFAULT_PERCENTAGE) + / 100 ) ) ); diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java index c5e062df5e77c..d84c5977cba93 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ProactiveStorageIT.java @@ -67,7 +67,7 @@ public void testScaleUp() throws IOException, InterruptedException { capacity(); IndicesStatsResponse stats = indicesAdmin().prepareStats(dsName).clear().setStore(true).get(); - long used = stats.getTotal().getStore().getSizeInBytes(); + long used = stats.getTotal().getStore().sizeInBytes(); long maxShardSize = Arrays.stream(stats.getShards()).mapToLong(s -> s.getStats().getStore().sizeInBytes()).max().orElseThrow(); // As long as usage is above low watermark, we will trigger a proactive scale up, since the simulated shards have an in-sync // set and therefore allocating these do not skip the low watermark check in the disk threshold decider. 
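The autoscaling tests above, and ReactiveStorageIT next, derive their expected capacities from the same stream idioms over shard-level store stats: a sum for total usage (the real tests read the aggregate from stats.getTotal(), sketched here as a sum), max().orElseThrow() for the largest shard, and groupingBy/summingLong for per-node totals. A self-contained sketch with a stand-in stats type:

import java.util.Arrays;
import java.util.Map;
import java.util.stream.Collectors;

// Stand-in for per-shard store stats; the real tests read these from an IndicesStatsResponse.
record ShardStoreStats(String nodeId, long sizeInBytes) {}

class StoreSizeMath {
    public static void main(String[] args) {
        ShardStoreStats[] shards = {
            new ShardStoreStats("node-1", 100), new ShardStoreStats("node-1", 250), new ShardStoreStats("node-2", 75)
        };
        // Total bytes across all shards.
        long used = Arrays.stream(shards).mapToLong(ShardStoreStats::sizeInBytes).sum();
        // Largest single shard; orElseThrow() fails fast on an unexpectedly empty stats response.
        long maxShardSize = Arrays.stream(shards).mapToLong(ShardStoreStats::sizeInBytes).max().orElseThrow();
        // Bytes per node, as in the byNode map built in ReactiveStorageIT.testScaleWhileShrinking.
        Map<String, Long> byNode = Arrays.stream(shards)
            .collect(Collectors.groupingBy(ShardStoreStats::nodeId, Collectors.summingLong(ShardStoreStats::sizeInBytes)));
        System.out.println(used + " / " + maxShardSize + " / " + byNode);
    }
}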
diff --git a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java index 5c097cdc24ed1..5f724509ec98a 100644 --- a/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java +++ b/x-pack/plugin/autoscaling/src/internalClusterTest/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageIT.java @@ -78,7 +78,7 @@ public void testScaleUp() throws InterruptedException { capacity(); IndicesStatsResponse stats = indicesAdmin().prepareStats(indexName).clear().setStore(true).get(); - long used = stats.getTotal().getStore().getSizeInBytes(); + long used = stats.getTotal().getStore().sizeInBytes(); long minShardSize = Arrays.stream(stats.getShards()).mapToLong(s -> s.getStats().getStore().sizeInBytes()).min().orElseThrow(); long maxShardSize = Arrays.stream(stats.getShards()).mapToLong(s -> s.getStats().getStore().sizeInBytes()).max().orElseThrow(); long enoughSpace = used + HIGH_WATERMARK_BYTES + 1; @@ -274,14 +274,14 @@ public void testScaleWhileShrinking() throws Exception { refresh(); IndicesStatsResponse stats = indicesAdmin().prepareStats(indexName).clear().setStore(true).get(); - long used = stats.getTotal().getStore().getSizeInBytes(); + long used = stats.getTotal().getStore().sizeInBytes(); long maxShardSize = Arrays.stream(stats.getShards()).mapToLong(s -> s.getStats().getStore().sizeInBytes()).max().orElseThrow(); Map byNode = Arrays.stream(stats.getShards()) .collect( Collectors.groupingBy( s -> s.getShardRouting().currentNodeId(), - Collectors.summingLong(s -> s.getStats().getStore().getSizeInBytes()) + Collectors.summingLong(s -> s.getStats().getStore().sizeInBytes()) ) ); @@ -427,7 +427,7 @@ public void testScaleDuringSplitOrClone() throws Exception { refresh(); IndicesStatsResponse stats = indicesAdmin().prepareStats(indexName).clear().setStore(true).get(); - long used = stats.getTotal().getStore().getSizeInBytes(); + long used = stats.getTotal().getStore().sizeInBytes(); long enoughSpace = used + HIGH_WATERMARK_BYTES + 1; diff --git a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java index a15ff51fc24ee..12291799b430a 100644 --- a/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java +++ b/x-pack/plugin/autoscaling/src/test/java/org/elasticsearch/xpack/autoscaling/storage/ReactiveStorageDeciderServiceTests.java @@ -55,12 +55,12 @@ import org.elasticsearch.xpack.cluster.routing.allocation.DataTierAllocationDecider; import java.util.Arrays; -import java.util.Collection; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; +import java.util.function.Predicate; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; @@ -520,25 +520,26 @@ public void testUnmovableSize() { stateBuilder.metadata(metaBuilder); ClusterState clusterState = stateBuilder.build(); - Set shards = IntStream.range(0, between(1, 10)) + var shards = IntStream.range(0, between(1, 10)) .mapToObj(i -> Tuple.tuple(new ShardId(indexMetadata.getIndex(), randomInt(10)), randomBoolean())) 
.distinct() .map(t -> TestShardRouting.newShardRouting(t.v1(), nodeId, t.v2(), ShardRoutingState.STARTED)) - .collect(Collectors.toSet()); + .toList(); long minShardSize = randomLongBetween(1, 10); - Map diskUsages = new HashMap<>(); - diskUsages.put(nodeId, new DiskUsage(nodeId, null, null, ByteSizeUnit.KB.toBytes(100), ByteSizeUnit.KB.toBytes(5))); - Map shardSize = new HashMap<>(); ShardRouting missingShard = randomBoolean() ? randomFrom(shards) : null; - Collection shardsWithSizes = shards.stream().filter(s -> s != missingShard).collect(Collectors.toSet()); - for (ShardRouting shard : shardsWithSizes) { - shardSize.put(shardIdentifier(shard), ByteSizeUnit.KB.toBytes(randomLongBetween(minShardSize, 100))); + Map shardSize = new HashMap<>(); + for (ShardRouting shard : shards) { + if (shard != missingShard) { + shardSize.put(shardIdentifier(shard), ByteSizeUnit.KB.toBytes(randomLongBetween(minShardSize, 100))); + } } - if (shardsWithSizes.isEmpty() == false) { - shardSize.put(shardIdentifier(randomFrom(shardsWithSizes)), ByteSizeUnit.KB.toBytes(minShardSize)); + if (shardSize.isEmpty() == false) { + shardSize.put(randomFrom(shardSize.keySet()), ByteSizeUnit.KB.toBytes(minShardSize)); } + + var diskUsages = Map.of(nodeId, new DiskUsage(nodeId, null, null, ByteSizeUnit.KB.toBytes(100), ByteSizeUnit.KB.toBytes(5))); ClusterInfo info = new ClusterInfo(diskUsages, diskUsages, shardSize, Map.of(), Map.of(), Map.of()); ReactiveStorageDeciderService.AllocationState allocationState = new ReactiveStorageDeciderService.AllocationState( @@ -553,11 +554,12 @@ public void testUnmovableSize() { ); long result = allocationState.unmovableSize(nodeId, shards); - if (missingShard != null - && (missingShard.primary() - || clusterState.getRoutingNodes().activePrimary(missingShard.shardId()) == null - || info.getShardSize(clusterState.getRoutingNodes().activePrimary(missingShard.shardId())) == null) - || minShardSize < 5) { + + Predicate shardSizeKnown = shard -> shard.primary() + ? info.getShardSize(shard.shardId(), true) != null + : info.getShardSize(shard.shardId(), true) != null || info.getShardSize(shard.shardId(), false) != null; + + if ((missingShard != null && shardSizeKnown.test(missingShard) == false) || minShardSize < 5) { // the diff between used and high watermark is 5 KB. 
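The rewritten expectation above folds a four-clause boolean into a named Predicate, which is the point of the change: the condition "do we know this shard's size" now reads as a single named concept. A minimal illustration of the pattern with stand-in types, before the assertion that follows:

import java.util.function.Predicate;

// Stand-in for a ShardRouting plus the primary/replica size lookups made against ClusterInfo.
record Shard(boolean primary, Long primarySize, Long replicaSize) {}

class ShardSizeCheck {
    public static void main(String[] args) {
        // Naming the condition once keeps the guarding if-statement readable.
        Predicate<Shard> shardSizeKnown = shard -> shard.primary()
            ? shard.primarySize() != null
            : shard.primarySize() != null || shard.replicaSize() != null;

        Shard missingShard = new Shard(true, null, null);
        if (shardSizeKnown.test(missingShard) == false) {
            System.out.println("size unknown: expect the watermark headroom fallback");
        }
    }
}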
assertThat(result, equalTo(ByteSizeUnit.KB.toBytes(5))); } else { diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java index 1766d8fe47820..7ca37f376045f 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/BlobCacheMetrics.java @@ -20,19 +20,19 @@ public class BlobCacheMetrics { public BlobCacheMetrics(MeterRegistry meterRegistry) { this( meterRegistry.registerLongCounter( - "elasticsearch.blob_cache.miss_that_triggered_read", + "es.blob_cache.miss_that_triggered_read.total", "The number of times there was a cache miss that triggered a read from the blob store", "count" ), meterRegistry.registerLongCounter( - "elasticsearch.blob_cache.count_of_evicted_used_regions", + "es.blob_cache.count_of_evicted_used_regions.total", "The number of times a cache entry was evicted where the frequency was not zero", "entries" ), meterRegistry.registerLongHistogram( - "elasticsearch.blob_cache.cache_miss_load_times", - "The timing data for populating entries in the blob store resulting from a cache miss.", - "count" + "es.blob_cache.cache_miss_load_times.histogram", + "The time in microseconds for populating entries in the blob store resulting from a cache miss, expressed as a histogram.", + "micros" ) ); } diff --git a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java index be95f5c883de8..5e8933f86ae7d 100644 --- a/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java +++ b/x-pack/plugin/blob-cache/src/main/java/org/elasticsearch/blobcache/shared/SharedBlobCacheService.java @@ -55,6 +55,7 @@ import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.LongAdder; import java.util.function.IntConsumer; @@ -815,7 +816,7 @@ public int populateAndRead( ) throws Exception { // We are interested in the total time that the system spends when fetching a result (including time spent queuing), so we start // our measurement here. 
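The change that follows switches this measurement from relativeTimeInMillis() to relativeTimeInNanos() and records the elapsed time in microseconds, matching the new "micros" unit declared for the renamed histogram above. A minimal sketch of the conversion pattern; the histogram type here is a stand-in, not the real telemetry API:

import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.LongAdder;

// Stand-in histogram; the real metric is registered through the telemetry MeterRegistry.
class LongHistogram {
    private final LongAdder samples = new LongAdder();
    void record(long value) { samples.add(value); }
}

class CacheMissTimer {
    private final LongHistogram cacheMissLoadTimes = new LongHistogram();

    void populateOnMiss(Runnable fillCacheRange) {
        // Start the clock before the work is queued so queue time is included in the sample.
        long startNanos = System.nanoTime();
        fillCacheRange.run();
        // Nanosecond clock, microsecond metric unit: convert once, at the recording site.
        long elapsedMicros = TimeUnit.NANOSECONDS.toMicros(System.nanoTime() - startNanos);
        cacheMissLoadTimes.record(elapsedMicros);
    }

    public static void main(String[] args) {
        new CacheMissTimer().populateOnMiss(() -> { });
    }
}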
- final long startTime = threadPool.relativeTimeInMillis(); + final long startTime = threadPool.relativeTimeInNanos(); RangeMissingHandler writerInstrumentationDecorator = ( SharedBytes.IO channel, int channelPos, @@ -823,7 +824,7 @@ public int populateAndRead( int length, IntConsumer progressUpdater) -> { writer.fillCacheRange(channel, channelPos, relativePos, length, progressUpdater); - var elapsedTime = threadPool.relativeTimeInMillis() - startTime; + var elapsedTime = TimeUnit.NANOSECONDS.toMicros(threadPool.relativeTimeInNanos() - startTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissLoadTimes().record(elapsedTime); SharedBlobCacheService.this.blobCacheMetrics.getCacheMissCounter().increment(); }; diff --git a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java index f7baafa8402d0..dff3ff935595f 100644 --- a/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java +++ b/x-pack/plugin/ccr/src/internalClusterTest/java/org/elasticsearch/xpack/ccr/CcrRepositoryIT.java @@ -537,7 +537,7 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() assertThat(indexShardSnapshotStatus.getStage(), is(IndexShardSnapshotStatus.Stage.DONE)); assertThat( indexShardSnapshotStatus.getTotalSize(), - equalTo(indexStats.getIndexShards().get(shardId).getPrimary().getStore().getSizeInBytes()) + equalTo(indexStats.getIndexShards().get(shardId).getPrimary().getStore().sizeInBytes()) ); } @@ -594,7 +594,7 @@ public void testCcrRepositoryFetchesSnapshotShardSizeFromIndexShardStoreStats() assertThat( "Snapshot shard size fetched for follower shard [" + shardId + "] does not match leader store size", fetchedSnapshotShardSizes.get(shardId), - equalTo(indexStats.getIndexShards().get(shardId).getPrimary().getStore().getSizeInBytes()) + equalTo(indexStats.getIndexShards().get(shardId).getPrimary().getStore().sizeInBytes()) ); } diff --git a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java index b90b203e2d29f..c99726803e00e 100644 --- a/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java +++ b/x-pack/plugin/ccr/src/main/java/org/elasticsearch/xpack/ccr/repository/CcrRepository.java @@ -517,7 +517,7 @@ public IndexShardSnapshotStatus.Copy getShardSnapshotStatus(SnapshotId snapshotI final ShardRouting shardRouting = shardStats.getShardRouting(); if (shardRouting.shardId().id() == shardId.getId() && shardRouting.primary() && shardRouting.active()) { // we only care about the shard size here for shard allocation, populate the rest with dummy values - final long totalSize = shardStats.getStats().getStore().getSizeInBytes(); + final long totalSize = shardStats.getStats().getStore().sizeInBytes(); return IndexShardSnapshotStatus.newDone(0L, 0L, 1, 1, totalSize, totalSize, DUMMY_GENERATION); } } diff --git a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java index 047a2d6225035..ea4bc8c92047a 100644 --- a/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java +++ b/x-pack/plugin/ccr/src/test/java/org/elasticsearch/xpack/CcrIntegTestCase.java @@ -7,10 +7,10 @@ package org.elasticsearch.xpack; +import 
org.apache.logging.log4j.Level; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.admin.cluster.health.ClusterHealthRequest; import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksResponse; import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequest; @@ -38,6 +38,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.Priority; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.network.NetworkModule; @@ -59,6 +60,7 @@ import org.elasticsearch.indices.store.IndicesStore; import org.elasticsearch.license.LicenseSettings; import org.elasticsearch.license.LicensesMetadata; +import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.search.SearchResponseUtils; @@ -421,24 +423,19 @@ private ClusterHealthStatus ensureColor( {} timed out: leader cluster state: {} - leader cluster hot threads: - {} leader cluster tasks: {} follower cluster state: {} - follower cluster hot threads: - {} follower cluster tasks: {}""", method, leaderClient().admin().cluster().prepareState().get().getState(), - getHotThreads(leaderClient()), - leaderClient().admin().cluster().preparePendingClusterTasks().get(), + ESIntegTestCase.getClusterPendingTasks(leaderClient()), followerClient().admin().cluster().prepareState().get().getState(), - getHotThreads(followerClient()), - followerClient().admin().cluster().preparePendingClusterTasks().get() + ESIntegTestCase.getClusterPendingTasks(followerClient()) ); + HotThreads.logLocalHotThreads(logger, Level.INFO, "hot threads at timeout", ReferenceDocs.LOGGING); fail("timed out waiting for " + color + " state"); } assertThat( @@ -450,19 +447,6 @@ private ClusterHealthStatus ensureColor( return actionGet.getStatus(); } - static String getHotThreads(Client client) { - return client.admin() - .cluster() - .prepareNodesHotThreads() - .setThreads(99999) - .setIgnoreIdleThreads(false) - .get() - .getNodes() - .stream() - .map(NodeHotThreads::getHotThreads) - .collect(Collectors.joining("\n")); - } - protected final Index resolveLeaderIndex(String index) { GetIndexResponse getIndexResponse = leaderClient().admin().indices().prepareGetIndex().setIndices(index).get(); assertTrue("index " + index + " not found", getIndexResponse.getSettings().containsKey(index)); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index 133819cd601d7..f10e7cf170bde 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -160,6 +160,12 @@ public Iterator<Setting<?>> settings() { Property.NodeScope ); + /** Optional setting to prevent startup if required providers are not discovered at runtime */ + public static final Setting<List<String>> FIPS_REQUIRED_PROVIDERS = Setting.stringListSetting( + "xpack.security.fips_mode.required_providers",
Property.NodeScope + ); + + /** * Setting for enabling the enrollment process, ie the enroll APIs are enabled, and the initial cluster node generates and displays * enrollment tokens (for Kibana and sometimes for ES nodes) when starting up for the first time. diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java index 505d85c764b17..e88d52e6d8080 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/application/EnterpriseSearchFeatureSetUsage.java @@ -23,7 +23,7 @@ public class EnterpriseSearchFeatureSetUsage extends XPackFeatureSet.Usage { static final TransportVersion BEHAVIORAL_ANALYTICS_TRANSPORT_VERSION = TransportVersions.V_8_8_1; - static final TransportVersion QUERY_RULES_TRANSPORT_VERSION = TransportVersions.V_8_500_046; + static final TransportVersion QUERY_RULES_TRANSPORT_VERSION = TransportVersions.V_8_500_061; public static final String SEARCH_APPLICATIONS = "search_applications"; public static final String ANALYTICS_COLLECTIONS = "analytics_collections"; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java index 945084395448a..efc31aacf5e20 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/AsyncResultsService.java @@ -15,6 +15,7 @@ import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.TriConsumer; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.tasks.TaskManager; @@ -144,7 +145,17 @@ private void getSearchResponseFromIndex( long nowInMillis, ActionListener listener ) { - store.getResponse(searchId, true, listener.delegateFailure((l, response) -> sendFinalResponse(request, response, nowInMillis, l))); + store.getResponse(searchId, true, listener.delegateFailure((l, response) -> { + try { + sendFinalResponse(request, response, nowInMillis, l); + } finally { + if (response instanceof StoredAsyncResponse storedAsyncResponse + && storedAsyncResponse.getResponse() instanceof RefCounted refCounted) { + refCounted.decRef(); + } + } + + })); } private void sendFinalResponse(GetAsyncResultRequest request, Response response, long nowInMillis, ActionListener listener) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/StoredAsyncTask.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/StoredAsyncTask.java index 9f75ac0f5f564..9f420f7521cdb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/StoredAsyncTask.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/async/StoredAsyncTask.java @@ -23,7 +23,7 @@ public abstract class StoredAsyncTask extends C private final AsyncExecutionId asyncExecutionId; private final Map originHeaders; private volatile long expirationTimeMillis; - private final List<ActionListener<Response>> completionListeners; + protected final List<ActionListener<Response>> completionListeners; @SuppressWarnings("this-escape") public
StoredAsyncTask( @@ -79,7 +79,8 @@ public synchronized void removeCompletionListener(ActionListener liste */ public synchronized void onResponse(Response response) { for (ActionListener listener : completionListeners) { - listener.onResponse(response); + response.incRef(); + ActionListener.respondAndRelease(listener, response); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java index f4c3704cd65c1..198cadc2c5cb7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/common/notifications/AbstractAuditor.java @@ -67,11 +67,9 @@ protected AbstractAuditor( ) { this(client, auditIndex, templateConfig.getTemplateName(), () -> { - try { + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes())) { return new PutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( - ComposableIndexTemplate.parse( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes()) - ) + ComposableIndexTemplate.parse(parser) ).masterNodeTimeout(MASTER_TIMEOUT); } catch (IOException e) { throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java index 10ae1846e91dc..ef93ab914f08f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleIndexerAction.java @@ -69,7 +69,7 @@ public Request() {} public Request(StreamInput in) throws IOException { super(in); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040) && in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && in.readBoolean()) { this.indexStartTimeMillis = in.readVLong(); this.indexEndTimeMillis = in.readVLong(); } else { @@ -132,7 +132,7 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeBoolean(true); out.writeVLong(indexStartTimeMillis); out.writeVLong(indexEndTimeMillis); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java index 8f254043cf7c2..2700ed844d063 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/downsample/DownsampleShardStatus.java @@ -144,7 +144,7 @@ public DownsampleShardStatus(StreamInput in) throws IOException { numSent = in.readLong(); numIndexed = in.readLong(); numFailed = in.readLong(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040) && 
in.readBoolean()) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) && in.readBoolean()) { totalShardDocCount = in.readVLong(); lastSourceTimestamp = in.readVLong(); lastTargetTimestamp = in.readVLong(); @@ -254,7 +254,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeLong(numSent); out.writeLong(numIndexed); out.writeLong(numFailed); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeBoolean(true); out.writeVLong(totalShardDocCount); out.writeVLong(lastSourceTimestamp); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java index 22a2c3a880ce5..818b45c2b5d00 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ilm/DownsampleAction.java @@ -91,7 +91,7 @@ public DownsampleAction(final DateHistogramInterval fixedInterval, final TimeVal public DownsampleAction(StreamInput in) throws IOException { this( new DateHistogramInterval(in), - in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_054) + in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061) ? TimeValue.parseTimeValue(in.readString(), WAIT_TIMEOUT_FIELD.getPreferredName()) : DEFAULT_WAIT_TIMEOUT ); @@ -100,7 +100,7 @@ public DownsampleAction(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { fixedInterval.writeTo(out); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_054)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeString(waitTimeout.getStringRep()); } else { out.writeString(DEFAULT_WAIT_TIMEOUT.getStringRep()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java index e6e4ea1001f68..d09b96f897e06 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelAction.java @@ -20,6 +20,8 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.io.IOException; import java.util.Objects; @@ -82,7 +84,16 @@ public void writeTo(StreamOutput out) throws IOException { @Override public ActionRequestValidationException validate() { - return null; + ActionRequestValidationException validationException = new ActionRequestValidationException(); + if (MlStrings.isValidId(this.modelId) == false) { + validationException.addValidationError(Messages.getMessage(Messages.INVALID_ID, "model_id", this.modelId)); + } + + if (validationException.validationErrors().isEmpty() == false) { + return validationException; + } else { + return null; + } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java index 7cef2bed04ce3..6209ead0cc6a1 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlTasks.java @@ -229,9 +229,13 @@ public static SnapshotUpgradeState getSnapshotUpgradeState(@Nullable PersistentT public static DatafeedState getDatafeedState(String datafeedId, @Nullable PersistentTasksCustomMetadata tasks) { PersistentTasksCustomMetadata.PersistentTask task = getDatafeedTask(datafeedId, tasks); + return getDatafeedState(task); + } + + public static DatafeedState getDatafeedState(PersistentTasksCustomMetadata.PersistentTask task) { if (task == null) { // If we haven't started a datafeed then there will be no persistent task, - // which is the same as if the datafeed was't started + // which is the same as if the datafeed wasn't started return DatafeedState.STOPPED; } DatafeedState taskState = (DatafeedState) task.getState(); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java index 61b39e40a065c..5341efeec1094 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartAction.java @@ -91,7 +91,7 @@ public Request(StreamInput in) throws IOException { this.part = in.readVInt(); this.totalDefinitionLength = in.readVLong(); this.totalParts = in.readVInt(); - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_043)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -148,7 +148,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeVInt(part); out.writeVLong(totalDefinitionLength); out.writeVInt(totalParts); - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_043)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeBoolean(allowOverwriting); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java index 71d4ebdcb6ea5..c153cbc2c039b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelVocabularyAction.java @@ -91,7 +91,7 @@ public Request(StreamInput in) throws IOException { } else { this.scores = List.of(); } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_043)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { this.allowOverwriting = in.readBoolean(); } else { this.allowOverwriting = false; @@ -139,7 +139,7 @@ public void writeTo(StreamOutput out) throws IOException { if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_020)) { out.writeCollection(scores, StreamOutput::writeDouble); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_043)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeBoolean(allowOverwriting); } } diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java index ad9ab7088fef5..c05c73bc31ddf 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/StartTrainedModelDeploymentAction.java @@ -374,8 +374,10 @@ public static class TaskParams implements MlTaskParams, Writeable, ToXContentObj // TODO add support for other roles? If so, it may have to be an instance method... // NOTE, whatever determines assignment should not be dynamically set on the node // Otherwise assignment logic might fail - public static boolean mayAssignToNode(DiscoveryNode node) { - return node.getRoles().contains(DiscoveryNodeRole.ML_ROLE) && MlConfigVersion.fromNode(node).onOrAfter(VERSION_INTRODUCED); + public static boolean mayAssignToNode(@Nullable DiscoveryNode node) { + return node != null + && node.getRoles().contains(DiscoveryNodeRole.ML_ROLE) + && MlConfigVersion.fromNode(node).onOrAfter(VERSION_INTRODUCED); } public static final MlConfigVersion VERSION_INTRODUCED = MlConfigVersion.V_8_0_0; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java similarity index 99% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java index 0847479489ec2..1d6c5e564a442 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ModelAliasMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/ModelAliasMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.inference; +package org.elasticsearch.xpack.core.ml.inference; import org.elasticsearch.TransportVersion; import org.elasticsearch.TransportVersions; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java index fd2f3627e3fb1..826b0785aa563 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfo.java @@ -86,6 +86,10 @@ public int getTargetAllocations() { return targetAllocations; } + public int getFailedAllocations() { + return state == RoutingState.FAILED ? 
targetAllocations : 0; + } + public RoutingState getState() { return state; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java index d27d325a5c596..8147dabda7b48 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignment.java @@ -287,6 +287,10 @@ public int totalTargetAllocations() { return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getTargetAllocations).sum(); } + public int totalFailedAllocations() { + return nodeRoutingTable.values().stream().mapToInt(RoutingInfo::getFailedAllocations).sum(); + } + @Override public boolean equals(Object o) { if (this == o) return true; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java similarity index 98% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java index aabedfc4351b5..36fec9ec7b243 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentMetadata.java @@ -5,7 +5,7 @@ * 2.0. */ -package org.elasticsearch.xpack.ml.inference.assignment; +package org.elasticsearch.xpack.core.ml.inference.assignment; import org.elasticsearch.ResourceAlreadyExistsException; import org.elasticsearch.ResourceNotFoundException; @@ -23,7 +23,6 @@ import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java similarity index 76% rename from x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java rename to x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java index 3640d8dcb2808..fa0ce4a095ba0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/inference/assignment/TrainedModelAssignmentUtils.java @@ -5,15 +5,10 @@ * 2.0. 
*/ -package org.elasticsearch.xpack.ml.inference.assignment; +package org.elasticsearch.xpack.core.ml.inference.assignment; import org.elasticsearch.cluster.ClusterState; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfoUpdate; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; -import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; -import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import java.util.List; import java.util.Optional; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java index ac934a71ec311..0337000a201f2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java @@ -294,11 +294,14 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws */ public Map asMap(NamedXContentRegistry xContentRegistry) throws IOException { String strRep = Strings.toString(this); - XContentParser parser = JsonXContent.jsonXContent.createParser( - XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry).withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), - strRep - ); - return parser.mapOrdered(); + try ( + XContentParser parser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry).withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + strRep + ) + ) { + return parser.mapOrdered(); + } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java index 5d1b2ef9a08e5..466aa907b790b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/MlIndexAndAlias.java @@ -329,11 +329,9 @@ public static void installIndexTemplateIfRequired( } PutComposableIndexTemplateAction.Request request; - try { + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes())) { request = new PutComposableIndexTemplateAction.Request(templateConfig.getTemplateName()).indexTemplate( - ComposableIndexTemplate.parse( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, templateConfig.loadBytes()) - ) + ComposableIndexTemplate.parse(parser) ).masterNodeTimeout(masterTimeout); } catch (IOException e) { throw new ElasticsearchParseException("unable to parse composable template " + templateConfig.getTemplateName(), e); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java index c2d15e54ed667..7596fe75b4173 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/search/action/AsyncStatusResponse.java @@ -139,7 +139,7 @@ public AsyncStatusResponse(StreamInput in) throws IOException { } else { this.clusters = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { this.completionTimeMillis = in.readOptionalVLong(); } else { this.completionTimeMillis = null; @@ -164,7 +164,7 @@ public void writeTo(StreamOutput out) throws IOException { // optional since only CCS uses it; it is null for local-only searches out.writeOptionalWriteable(clusters); } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { out.writeOptionalVLong(completionTimeMillis); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/EnrollmentToken.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/EnrollmentToken.java index b5444449af1f4..466caa11771a5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/EnrollmentToken.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/EnrollmentToken.java @@ -109,11 +109,14 @@ public static EnrollmentToken decodeFromString(String encoded) throws IOExceptio if (Strings.isNullOrEmpty(encoded)) { throw new IOException("Cannot decode enrollment token from an empty string"); } - final XContentParser jsonParser = JsonXContent.jsonXContent.createParser( - XContentParserConfiguration.EMPTY, - Base64.getDecoder().decode(encoded) - ); - return EnrollmentToken.PARSER.parse(jsonParser, null); + try ( + XContentParser jsonParser = JsonXContent.jsonXContent.createParser( + XContentParserConfiguration.EMPTY, + Base64.getDecoder().decode(encoded) + ) + ) { + return EnrollmentToken.PARSER.parse(jsonParser, null); + } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java new file mode 100644 index 0000000000000..fbc08a0dee8aa --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/ActionTypes.java @@ -0,0 +1,23 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.core.security.action; + +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; + +/** + * A collection of action types for the Security plugin that need to be available in xpack.core.security and thus cannot be stored + * directly with their transport action implementation.
+ */ +public final class ActionTypes { + private ActionTypes() {}; + + public static final ActionType RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION = ActionType.localOnly( + "cluster:admin/xpack/security/remote_cluster_credentials/reload" + ); +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CrossClusterApiKeyRoleDescriptorBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CrossClusterApiKeyRoleDescriptorBuilder.java index 0763c208abf64..9695aeae283e2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CrossClusterApiKeyRoleDescriptorBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/CrossClusterApiKeyRoleDescriptorBuilder.java @@ -91,10 +91,9 @@ public RoleDescriptor build() { } public static CrossClusterApiKeyRoleDescriptorBuilder parse(String access) throws IOException { - return CrossClusterApiKeyRoleDescriptorBuilder.PARSER.parse( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, access), - null - ); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, access)) { + return CrossClusterApiKeyRoleDescriptorBuilder.PARSER.parse(parser, null); + } } static void validate(RoleDescriptor roleDescriptor) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java index d76696dc4fe99..71e0c98fb0012 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/apikey/GetApiKeyRequest.java @@ -26,7 +26,7 @@ */ public final class GetApiKeyRequest extends ActionRequest { - static TransportVersion API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_500_054; + static TransportVersion API_KEY_ACTIVE_ONLY_PARAM_TRANSPORT_VERSION = TransportVersions.V_8_500_061; private final String realmName; private final String userName; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java index 5c75bf685c330..73ee4d1f27299 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/action/user/AuthenticateResponse.java @@ -20,7 +20,7 @@ public class AuthenticateResponse extends ActionResponse implements ToXContent { - public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_500_040; + public static final TransportVersion VERSION_OPERATOR_FIELD = TransportVersions.V_8_500_061; private final Authentication authentication; private final boolean operator; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java index 83a36510aa201..cf42d73c75131 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java +++ 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/TemplateRoleName.java @@ -116,32 +116,35 @@ public void validate(ScriptService scriptService) { } private static List convertJsonToList(String evaluation) throws IOException { - final XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY, evaluation); - XContentParser.Token token = parser.currentToken(); - if (token == null) { - token = parser.nextToken(); - } - if (token == XContentParser.Token.VALUE_STRING) { - return Collections.singletonList(parser.text()); - } else if (token == XContentParser.Token.START_ARRAY) { - return parser.list().stream().filter(Objects::nonNull).map(o -> { - if (o instanceof String) { - return (String) o; - } else { - throw new XContentParseException( - "Roles array may only contain strings but found [" + o.getClass().getName() + "] [" + o + "]" - ); - } - }).collect(Collectors.toList()); - } else { - throw new XContentParseException("Roles template must generate a string or an array of strings, but found [" + token + "]"); + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser(XContentParserConfiguration.EMPTY, evaluation) + ) { + XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } + if (token == XContentParser.Token.VALUE_STRING) { + return Collections.singletonList(parser.text()); + } else if (token == XContentParser.Token.START_ARRAY) { + return parser.list().stream().filter(Objects::nonNull).map(o -> { + if (o instanceof String) { + return (String) o; + } else { + throw new XContentParseException( + "Roles array may only contain strings but found [" + o.getClass().getName() + "] [" + o + "]" + ); + } + }).collect(Collectors.toList()); + } else { + throw new XContentParseException("Roles template must generate a string or an array of strings, but found [" + token + "]"); + } } } private String parseTemplate(ScriptService scriptService, Map parameters) throws IOException { - final XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, template, XContentType.JSON); - return MustacheTemplateEvaluator.evaluate(scriptService, parser, parameters); + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, template, XContentType.JSON)) { + return MustacheTemplateEvaluator.evaluate(scriptService, parser, parameters); + } } private static BytesReference extractTemplate(XContentParser parser, Void ignore) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java index 5ec28dc68181e..e3f6b1aa450a4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/support/mapper/expressiondsl/ExpressionParser.java @@ -57,8 +57,8 @@ public static RoleMapperExpression parseObject(XContentParser parser, String id) * @param content The XContent (typically JSON) DSL representation of the expression */ public RoleMapperExpression parse(String name, XContentSource content) throws IOException { - try (InputStream stream = content.getBytes().streamInput()) { - return parse(name, 
content.parser(NamedXContentRegistry.EMPTY, stream)); + try (InputStream stream = content.getBytes().streamInput(); var parser = content.parser(NamedXContentRegistry.EMPTY, stream)) { + return parse(name, parser); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java index 2616b63df7c01..013d7cc21a54a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/privilege/SystemPrivilege.java @@ -12,6 +12,7 @@ import org.elasticsearch.index.seqno.RetentionLeaseSyncAction; import org.elasticsearch.persistent.CompletionPersistentTaskAction; import org.elasticsearch.transport.TransportActionProxy; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.support.StringMatcher; import java.util.Collections; @@ -43,7 +44,8 @@ public final class SystemPrivilege extends Privilege { "indices:data/read/*", // needed for SystemIndexMigrator "indices:admin/refresh", // needed for SystemIndexMigrator "indices:admin/aliases", // needed for SystemIndexMigrator - TransportSearchShardsAction.TYPE.name() // added so this API can be called with the system user by other APIs + TransportSearchShardsAction.TYPE.name(), // added so this API can be called with the system user by other APIs + ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name() // needed for Security plugin reload of remote cluster credentials ); private static final Predicate PREDICATE = (action) -> { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java index 474ba25e3e117..8004848f59235 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @@ -251,8 +251,12 @@ static RoleDescriptor kibanaSystem(String name) { "indices:admin/data_stream/lifecycle/put" ) .build(), - // Endpoint specific action responses. Kibana reads from these to display responses to the user. - RoleDescriptor.IndicesPrivileges.builder().indices(".logs-endpoint.action.responses-*").privileges("read").build(), + // Endpoint specific action responses. Kibana reads and writes (for third party agents) to the index + // to display action responses to the user. + RoleDescriptor.IndicesPrivileges.builder() + .indices(".logs-endpoint.action.responses-*") + .privileges("auto_configure", "read", "write") + .build(), // Endpoint specific actions. Kibana reads and writes to this index to track new actions and display them. 
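The parser changes in EnrollmentToken, CrossClusterApiKeyRoleDescriptorBuilder, TemplateRoleName, and ExpressionParser above (and in IndexTemplateRegistry, QueryConfig, and AggregationConfig further down) are all the same fix: an XContentParser is Closeable, and creating one outside try-with-resources leaks it whenever parsing throws. A sketch of the before/after shape, using Jackson as a stand-in for the XContent layer; the kibana_system privilege list resumes below:

import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonParser;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

class ParserHygiene {
    private static final JsonFactory FACTORY = new JsonFactory();

    // Leaky: if readValues throws, the parser is never closed.
    static List<String> leaky(String json) throws IOException {
        JsonParser parser = FACTORY.createParser(json);
        return readValues(parser);
    }

    // Fixed: try-with-resources closes the parser on success and failure alike.
    static List<String> fixed(String json) throws IOException {
        try (JsonParser parser = FACTORY.createParser(json)) {
            return readValues(parser);
        }
    }

    private static List<String> readValues(JsonParser parser) throws IOException {
        List<String> values = new ArrayList<>();
        while (parser.nextToken() != null) {
            if (parser.currentToken().isScalarValue()) {
                values.add(parser.getText());
            }
        }
        return values;
    }

    public static void main(String[] args) throws IOException {
        System.out.println(fixed("[\"a\",\"b\"]"));
    }
}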
RoleDescriptor.IndicesPrivileges.builder() .indices(".logs-endpoint.actions-*") diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java index b0f1c78b0c99d..ddc565c3f46a6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/template/IndexTemplateRegistry.java @@ -646,10 +646,8 @@ public void onFailure(Exception e) { protected static Map parseComposableTemplates(IndexTemplateConfig... config) { return Arrays.stream(config).collect(Collectors.toUnmodifiableMap(IndexTemplateConfig::getTemplateName, indexTemplateConfig -> { - try { - return ComposableIndexTemplate.parse( - JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, indexTemplateConfig.loadBytes()) - ); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, indexTemplateConfig.loadBytes())) { + return ComposableIndexTemplate.parse(parser); } catch (IOException e) { throw new AssertionError(e); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/QueryConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/QueryConfig.java index 2df00837f9a3a..8bf8a40c69b2a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/QueryConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/QueryConfig.java @@ -106,11 +106,14 @@ private static QueryBuilder queryFromXContent( NamedXContentRegistry namedXContentRegistry, DeprecationHandler deprecationHandler ) throws IOException { - QueryBuilder query = null; + final QueryBuilder query; XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); - XContentParser sourceParser = XContentType.JSON.xContent() - .createParser(namedXContentRegistry, deprecationHandler, BytesReference.bytes(xContentBuilder).streamInput()); - query = AbstractQueryBuilder.parseTopLevelQuery(sourceParser); + try ( + XContentParser sourceParser = XContentType.JSON.xContent() + .createParser(namedXContentRegistry, deprecationHandler, BytesReference.bytes(xContentBuilder).streamInput()) + ) { + query = AbstractQueryBuilder.parseTopLevelQuery(sourceParser); + } return query; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/AggregationConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/AggregationConfig.java index 763f328ecfa0b..095ada7ced411 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/AggregationConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/transform/transforms/pivot/AggregationConfig.java @@ -139,13 +139,15 @@ private static AggregatorFactories.Builder aggregationsFromXContent( NamedXContentRegistry namedXContentRegistry, DeprecationHandler deprecationHandler ) throws IOException { - AggregatorFactories.Builder aggregations = null; - + final AggregatorFactories.Builder aggregations; XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); - XContentParser sourceParser = XContentType.JSON.xContent() - .createParser(namedXContentRegistry, deprecationHandler, 
BytesReference.bytes(xContentBuilder).streamInput()); - sourceParser.nextToken(); - aggregations = AggregatorFactories.parseAggregators(sourceParser); + try ( + XContentParser sourceParser = XContentType.JSON.xContent() + .createParser(namedXContentRegistry, deprecationHandler, BytesReference.bytes(xContentBuilder).streamInput()) + ) { + sourceParser.nextToken(); + aggregations = AggregatorFactories.parseAggregators(sourceParser); + } return aggregations; } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java index b98a8abc019d0..df4f7828d1fed 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ClientHelperTests.java @@ -24,7 +24,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.Maps; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.TransportVersionUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -248,16 +248,7 @@ public void testExecuteWithHeadersNoHeaders() { PlainActionFuture searchFuture = new PlainActionFuture<>(); searchFuture.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + SearchResponseUtils.emptyWithTotalHits(null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY) ); when(client.search(any())).thenReturn(searchFuture); assertExecutionWithOrigin(Collections.emptyMap(), client); @@ -272,16 +263,7 @@ public void testExecuteWithHeaders() { PlainActionFuture searchFuture = new PlainActionFuture<>(); searchFuture.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + SearchResponseUtils.emptyWithTotalHits(null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY) ); when(client.search(any())).thenReturn(searchFuture); Map headers = Map.of( @@ -307,16 +289,7 @@ public void testExecuteWithHeadersNoSecurityHeaders() { PlainActionFuture searchFuture = new PlainActionFuture<>(); searchFuture.onResponse( - new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - null, - 0, - 0, - 0, - 0L, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ) + SearchResponseUtils.emptyWithTotalHits(null, 0, 0, 0, 0L, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY) ); when(client.search(any())).thenReturn(searchFuture); Map unrelatedHeaders = Map.of(randomAlphaOfLength(10), "anything"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java index bc191349ea601..e6bf5d067741b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/async/AsyncTaskServiceTests.java @@ -216,11 +216,13 @@ public void testAutoCreateIndex() throws Exception { // To begin with, the results index should be auto-created. 
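The ClientHelperTests hunks above collapse the verbose empty-SearchResponse constructions into single SearchResponseUtils.emptyWithTotalHits calls. Judging only from the call sites visible in this diff, the helper presumably reduces to something like the sketch below; the signature is taken from the call sites, and the body is an assumption built on the long-form SearchResponse constructor used in the surrounding hunks.

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;

final class SearchResponseUtilsSketch {
    // Sketch, not the actual utility: an empty hit set with a zero total,
    // passed through the same fourteen-argument constructor order used in this diff.
    static SearchResponse emptyWithTotalHits(
        String scrollId,
        int totalShards,
        int successfulShards,
        int skippedShards,
        long tookInMillis,
        ShardSearchFailure[] shardFailures,
        SearchResponse.Clusters clusters
    ) {
        SearchHits hits = new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0f);
        return new SearchResponse(hits, null, null, false, null, null, 1, scrollId, totalShards, successfulShards, skippedShards, tookInMillis, shardFailures, clusters);
    }
}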
AsyncExecutionId id = new AsyncExecutionId("0", new TaskId("N/A", 0)); AsyncSearchResponse resp = new AsyncSearchResponse(id.getEncoded(), true, true, 0L, 0L); - { + try { PlainActionFuture future = new PlainActionFuture<>(); indexService.createResponse(id.getDocId(), Collections.emptyMap(), resp, future); future.get(); assertSettings(); + } finally { + resp.decRef(); } // Delete the index, so we can test subsequent auto-create behaviour diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java index 29453205b4d00..25b7bd082243d 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/indexing/AsyncTwoPhaseIndexerTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; @@ -116,18 +115,24 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener return; } - final SearchResponseSections sections = new SearchResponseSections( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - null, - null, - false, - null, - null, - 1 - ); ActionListener.respondAndRelease( nextPhase, - new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null) + new SearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), + null, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ) ); } @@ -256,15 +261,6 @@ public boolean waitingForLatchCountDown() { @Override protected void doNextSearch(long waitTimeInNanos, ActionListener nextPhase) { ++searchOps; - final SearchResponseSections sections = new SearchResponseSections( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - null, - null, - false, - null, - null, - 1 - ); if (processOps == 3) { awaitForLatch(); @@ -272,7 +268,22 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener ActionListener.respondAndRelease( nextPhase, - new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null) + new SearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), + null, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ) ); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java new file mode 100644 index 0000000000000..10f35bf33f631 --- /dev/null +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/inference/action/PutInferenceModelActionTests.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.core.inference.action; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.inference.TaskType; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.core.ml.utils.MlStringsTests; +import org.junit.Before; + +import java.util.Locale; + +public class PutInferenceModelActionTests extends ESTestCase { + public static String TASK_TYPE; + public static String MODEL_ID; + public static XContentType X_CONTENT_TYPE; + public static BytesReference BYTES; + + @Before + public void setup() throws Exception { + TASK_TYPE = TaskType.ANY.toString(); + MODEL_ID = randomAlphaOfLengthBetween(1, 10).toLowerCase(Locale.ROOT); + X_CONTENT_TYPE = randomFrom(XContentType.values()); + BYTES = new BytesArray(randomAlphaOfLengthBetween(1, 10)); + } + + public void testValidate() { + // valid model ID + var request = new PutInferenceModelAction.Request(TASK_TYPE, MODEL_ID + "_-0", BYTES, X_CONTENT_TYPE); + ActionRequestValidationException validationException = request.validate(); + assertNull(validationException); + + // invalid model IDs + + var invalidRequest = new PutInferenceModelAction.Request(TASK_TYPE, "", BYTES, X_CONTENT_TYPE); + validationException = invalidRequest.validate(); + assertNotNull(validationException); + + var invalidRequest2 = new PutInferenceModelAction.Request( + TASK_TYPE, + randomAlphaOfLengthBetween(1, 10) + randomFrom(MlStringsTests.SOME_INVALID_CHARS), + BYTES, + X_CONTENT_TYPE + ); + validationException = invalidRequest2.validate(); + assertNotNull(validationException); + + var invalidRequest3 = new PutInferenceModelAction.Request(TASK_TYPE, null, BYTES, X_CONTENT_TYPE); + validationException = invalidRequest3.validate(); + assertNotNull(validationException); + } +} diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java index 4ffa2e27fe60c..ee304f966c9b4 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/action/PutTrainedModelDefinitionPartActionRequestTests.java @@ -72,7 +72,7 @@ protected Writeable.Reader instanceReader() { @Override protected Request mutateInstanceForVersion(Request instance, TransportVersion version) { - if (version.before(TransportVersions.V_8_500_043)) { + if (version.before(TransportVersions.V_8_500_061)) { return new Request( instance.getModelId(), instance.getDefinition(), diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java index 28ebf8b2445c5..830f7dde7c7d8 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/inference/assignment/RoutingInfoTests.java @@ -69,4 +69,17 @@ public void testIsRoutable_GivenStartedWithNonZeroAllocations() { RoutingInfo routingInfo = new RoutingInfo(randomIntBetween(1, 10), 1, RoutingState.STARTED, ""); 
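The PutInferenceModelActionTests added above expects request validation to reject an empty model ID, a null model ID, and IDs containing characters from SOME_INVALID_CHARS. A rough sketch of the kind of check the request presumably delegates to; the regex is an illustrative stand-in, not the actual MlStrings pattern.

import java.util.regex.Pattern;

final class ModelIdValidationSketch {
    // Assumed rule: lowercase alphanumerics plus '_' and '-', starting and
    // ending with an alphanumeric character. Illustrative only.
    private static final Pattern VALID_ID = Pattern.compile("[a-z0-9](?:[a-z0-9_-]*[a-z0-9])?");

    static boolean isValidId(String id) {
        return id != null && VALID_ID.matcher(id).matches();
    }
}

Under a rule like this the test's valid case (a random lowercase ID suffixed with "_-0") passes, while the empty, null, and invalid-character cases all yield a validation exception.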
assertThat(routingInfo.isRoutable(), is(true)); } + + public void testGetFailedAllocations() { + int targetAllocations = randomIntBetween(1, 10); + RoutingInfo routingInfo = new RoutingInfo( + randomIntBetween(0, targetAllocations), + targetAllocations, + randomFrom(RoutingState.STARTING, RoutingState.STARTED, RoutingState.STOPPING), + "" + ); + assertThat(routingInfo.getFailedAllocations(), is(0)); + routingInfo = new RoutingInfo(randomIntBetween(0, targetAllocations), targetAllocations, RoutingState.FAILED, ""); + assertThat(routingInfo.getFailedAllocations(), is(targetAllocations)); + } } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 880b62689dee2..c8fbe00d07618 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -108,12 +108,14 @@ public void testToXContentForInternalStorage() throws IOException { ToXContent.MapParams params = new ToXContent.MapParams(Collections.singletonMap(ToXContentParams.FOR_INTERNAL_STORAGE, "true")); BytesReference serializedJob = XContentHelper.toXContent(config, XContentType.JSON, params, false); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry()), serializedJob.streamInput()); - - Job parsedConfig = Job.LENIENT_PARSER.apply(parser, null).build(); - // When we are writing for internal storage, we do not include the datafeed config - assertThat(parsedConfig.getDatafeedConfig().isPresent(), is(false)); + try ( + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry()), serializedJob.streamInput()) + ) { + Job parsedConfig = Job.LENIENT_PARSER.apply(parser, null).build(); + // When we are writing for internal storage, we do not include the datafeed config + assertThat(parsedConfig.getDatafeedConfig().isPresent(), is(false)); + } } public void testFutureConfigParse() throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java index e1a9b20c048c4..c9370545036ff 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/persistence/ElasticsearchMappingsTests.java @@ -362,10 +362,9 @@ private Set collectResultsDocFieldNames() throws IOException { private Set collectFieldNames(String mapping) throws IOException { BufferedInputStream inputStream = new BufferedInputStream(new ByteArrayInputStream(mapping.getBytes(StandardCharsets.UTF_8))); - XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, inputStream); Set fieldNames = new HashSet<>(); boolean isAfterPropertiesStart = false; - try { + try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, inputStream)) { XContentParser.Token token = parser.nextToken(); while (token != null) { switch (token) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java 
b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java similarity index 87% rename from x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java rename to x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java index fb60ac39bdef1..04681fe6e0cd0 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/MlStringsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/utils/MlStringsTests.java @@ -4,10 +4,9 @@ * 2.0; you may not use this file except in compliance with the Elastic License * 2.0. */ -package org.elasticsearch.xpack.ml.utils; +package org.elasticsearch.xpack.core.ml.utils; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.ml.utils.MlStrings; import java.util.Arrays; import java.util.Collections; @@ -22,6 +21,37 @@ public class MlStringsTests extends ESTestCase { + public static final String[] SOME_INVALID_CHARS = { + "%", + " ", + "!", + "@", + "#", + "$", + "^", + "&", + "*", + "(", + ")", + "+", + "=", + "{", + "}", + "[", + "]", + "|", + "\\", + ":", + ";", + "\"", + "'", + "<", + ">", + ",", + "?", + "/", + "~" }; + public void testDoubleQuoteIfNotAlphaNumeric() { assertEquals("foo2", MlStrings.doubleQuoteIfNotAlphaNumeric("foo2")); assertEquals("\"fo o\"", MlStrings.doubleQuoteIfNotAlphaNumeric("fo o")); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleRestrictionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleRestrictionTests.java index fe64192cb0601..244e21f3f036c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleRestrictionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/RoleRestrictionTests.java @@ -72,9 +72,10 @@ public void testToXContent() throws Exception { final Restriction restriction = randomWorkflowsRestriction(1, 5); final XContentType xContentType = randomFrom(XContentType.values()); final BytesReference xContentValue = toShuffledXContent(restriction, xContentType, ToXContent.EMPTY_PARAMS, false); - final XContentParser parser = xContentType.xContent().createParser(XContentParserConfiguration.EMPTY, xContentValue.streamInput()); - final Restriction parsed = Restriction.parse(randomAlphaOfLengthBetween(3, 6), parser); - assertThat(parsed, equalTo(restriction)); + try (XContentParser parser = xContentType.xContent().createParser(XContentParserConfiguration.EMPTY, xContentValue.streamInput())) { + final Restriction parsed = Restriction.parse(randomAlphaOfLengthBetween(3, 6), parser); + assertThat(parsed, equalTo(restriction)); + } } public void testSerialization() throws IOException { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index 831dc58e14003..65f15c9c08e43 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -899,8 +899,8 @@ public void testKibanaSystemRole() { ); assertThat(kibanaRole.indices().allowedIndicesMatcher(GetIndexAction.NAME).test(indexAbstraction), is(true)); 
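The RoleRestrictionTests change above is one more instance of a refactor applied throughout this diff: XContentParser instances are opened in try-with-resources blocks so they are closed even when parsing throws. A minimal, self-contained sketch of the pattern, with an illustrative JSON payload:

import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.XContentParserConfiguration;
import org.elasticsearch.xcontent.json.JsonXContent;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Map;

final class ParserClosingSketch {
    static Map<String, Object> parseToMap(String json) throws IOException {
        // try-with-resources releases the parser on every path, including parse failures
        try (XContentParser parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, json.getBytes(StandardCharsets.UTF_8))) {
            return parser.map();
        }
    }
}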
assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(false)); - assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportIndexAction.NAME).test(indexAbstraction), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportDeleteAction.NAME).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportMultiSearchAction.TYPE.name()).test(indexAbstraction), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(TransportGetAction.TYPE.name()).test(indexAbstraction), is(true)); @@ -1150,7 +1150,8 @@ public void testKibanaSystemRole() { is(true) ); - final boolean isAlsoAutoCreateIndex = indexName.startsWith(".logs-endpoint.actions-"); + final boolean isAlsoAutoCreateIndex = indexName.startsWith(".logs-endpoint.actions-") + || indexName.startsWith(".logs-endpoint.action.responses-"); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(indexAbstraction), is(false)); assertThat(kibanaRole.indices().allowedIndicesMatcher(AutoCreateAction.NAME).test(indexAbstraction), is(isAlsoAutoCreateIndex)); assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateDataStreamAction.NAME).test(indexAbstraction), is(false)); diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json index 2b27a29e60c64..52edf97cd026e 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history-ilm-policy.json @@ -5,6 +5,9 @@ "rollover": { "max_primary_shard_size": "50gb", "max_age": "30d" + }, + "forcemerge": { + "max_num_segments": 1 } } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json index 95c3f0133a02f..e549d3bb3d168 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/ilm-history.json @@ -9,8 +9,10 @@ "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 0, - "index.auto_expand_replicas": "0-1", - "index.lifecycle.name": "ilm-history-ilm-policy" + "index.auto_expand_replicas": "0-1" + }, + "lifecycle": { + "data_retention": "90d" }, "mappings": { "dynamic": false, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json b/x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json index bff5c9a9477ac..f1eabad805b81 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/slm-history-ilm-policy.json @@ -5,6 +5,9 @@ "rollover": { "max_primary_shard_size": "50gb", "max_age": "30d" + }, + "forcemerge": { + "max_num_segments": 1 } } }, diff --git a/x-pack/plugin/core/template-resources/src/main/resources/slm-history.json b/x-pack/plugin/core/template-resources/src/main/resources/slm-history.json 
index 974e72e0795e9..c154cdfe19d66 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/slm-history.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/slm-history.json @@ -10,8 +10,10 @@ "settings": { "index.number_of_shards": 1, "index.number_of_replicas": 0, - "index.auto_expand_replicas": "0-1", - "index.lifecycle.name": "slm-history-ilm-policy" + "index.auto_expand_replicas": "0-1" + }, + "lifecycle": { + "data_retention": "90d" }, "mappings": { "dynamic": false, diff --git a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java index 6b4882bae9fd8..065053f117de0 100644 --- a/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java +++ b/x-pack/plugin/deprecation/src/main/java/org/elasticsearch/xpack/deprecation/logging/DeprecationIndexingTemplateRegistry.java @@ -70,11 +70,8 @@ public DeprecationIndexingTemplateRegistry( DEPRECATION_INDEXING_TEMPLATE_VERSION_VARIABLE ) )) { - try { - componentTemplates.put( - config.getTemplateName(), - ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) - ); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) { + componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser)); } catch (IOException e) { throw new AssertionError(e); } diff --git a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml index 70f66f38d39b9..0eb93c59c5b1d 100644 --- a/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml +++ b/x-pack/plugin/downsample/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/downsample/10_basic.yml @@ -46,9 +46,17 @@ setup: multi-counter: type: long time_series_metric: counter + scaled-counter: + type: scaled_float + scaling_factor: 100 + time_series_metric: counter multi-gauge: type: integer time_series_metric: gauge + scaled-gauge: + type: scaled_float + scaling_factor: 100 + time_series_metric: gauge network: properties: tx: @@ -63,21 +71,21 @@ setup: index: test body: - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "multi-gauge": [100, 200, 150], "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' + - '{"@timestamp": "2021-04-28T18:50:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.1", "multi-counter" : [10, 11, 12], "scaled-counter": 10.0, "multi-gauge": [100, 200, 150], "scaled-gauge": 100.0, "network": {"tx": 2001818691, "rx": 802133794}, "created_at": "2021-04-28T19:34:00.000Z", "running": false, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 6]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", 
"uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "multi-gauge": [90, 91, 95], "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' + - '{"@timestamp": "2021-04-28T18:50:24.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.26", "multi-counter" : [21, 22, 23], "scaled-counter": 20.0, "multi-gauge": [90, 91, 95], "scaled-gauge": 90.0, "network": {"tx": 2005177954, "rx": 801479970}, "created_at": "2021-04-28T19:35:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west1"], "values": [1, 1, 3]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "multi-gauge": [103, 110, 109], "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' + - '{"@timestamp": "2021-04-28T20:50:44.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.41", "multi-counter" : [1, 5, 10], "scaled-counter": 1.0, "multi-gauge": [103, 110, 109], "scaled-gauge": 104.0, "network": {"tx": 2006223737, "rx": 802337279}, "created_at": "2021-04-28T19:36:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod", "us-west2"], "values": [4, 1, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "multi-gauge": [100, 100, 100], "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' + - '{"@timestamp": "2021-04-28T20:51:04.467Z", "metricset": "pod", "k8s": {"pod": {"name": "cat", "uid":"947e4ced-1786-4e53-9e0c-5c447e959507", "ip": "10.10.55.22", "multi-counter" : [101, 102, 105], "scaled-counter": 100.0, "multi-gauge": [100, 100, 100], "scaled-gauge": 102.0, "network": {"tx": 2012916202, "rx": 803685721}, "created_at": "2021-04-28T19:37:00.000Z", "running": true, "number_of_containers": 2, "tags": ["backend", "prod"], "values": [2, 3, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "multi-gauge": [100, 100, 102], "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' + - '{"@timestamp": "2021-04-28T18:50:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.33", "multi-counter" : [7, 11, 44], "scaled-counter": 7.0, "multi-gauge": [100, 100, 102], "scaled-gauge": 100.0, "network": {"tx": 1434521831, "rx": 530575198}, "created_at": "2021-04-28T19:42:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test"], "values": [2, 3, 4]}}}' - '{"index": {}}' - - 
'{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "multi-gauge": [101, 102, 102], "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' + - '{"@timestamp": "2021-04-28T18:50:23.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.56", "multi-counter" : [0, 0, 1], "scaled-counter": 0.0, "multi-gauge": [101, 102, 102], "scaled-gauge": 101.0, "network": {"tx": 1434577921, "rx": 530600088}, "created_at": "2021-04-28T19:43:00.000Z", "running": false, "number_of_containers": 1, "tags": ["backend", "test", "us-west2"], "values": [2, 1, 1]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "multi-gauge": [99, 100, 110], "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' + - '{"@timestamp": "2021-04-28T19:50:53.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.37", "multi-counter" : [1000, 1001, 1002], "scaled-counter": 1000.0, "multi-gauge": [99, 100, 110], "scaled-gauge": 99.0, "network": {"tx": 1434587694, "rx": 530604797}, "created_at": "2021-04-28T19:44:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [4, 5, 2]}}}' - '{"index": {}}' - - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "multi-gauge": [95, 98, 100], "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' + - '{"@timestamp": "2021-04-28T19:51:03.142Z", "metricset": "pod", "k8s": {"pod": {"name": "dog", "uid":"df3145b3-0563-4d3b-a0f7-897eb2876ea9", "ip": "10.10.55.120", "multi-counter" : [76, 77, 78], "scaled-counter": 70.0, "multi-gauge": [95, 98, 100], "scaled-gauge": 95.0, "network": {"tx": 1434595272, "rx": 530605511}, "created_at": "2021-04-28T19:45:00.000Z", "running": true, "number_of_containers": 1, "tags": ["backend", "test", "us-west1"], "values": [3, 2, 1]}}}' - do: indices.put_settings: @@ -314,10 +322,15 @@ setup: - match: { hits.hits.0._source.metricset: pod } - match: { hits.hits.0._source.@timestamp: 2021-04-28T18:00:00.000Z } - match: { hits.hits.0._source.k8s.pod.multi-counter: 21 } + - match: { hits.hits.0._source.k8s.pod.scaled-counter: 20.0 } - match: { hits.hits.0._source.k8s.pod.multi-gauge.min: 90 } - match: { hits.hits.0._source.k8s.pod.multi-gauge.max: 200 } - match: { hits.hits.0._source.k8s.pod.multi-gauge.sum: 726 } - match: { hits.hits.0._source.k8s.pod.multi-gauge.value_count: 6 } + - match: { hits.hits.0._source.k8s.pod.scaled-gauge.min: 90.0 } + - match: { hits.hits.0._source.k8s.pod.scaled-gauge.max: 100.0 } + - match: { hits.hits.0._source.k8s.pod.scaled-gauge.sum: 190.0 } + - match: { 
hits.hits.0._source.k8s.pod.scaled-gauge.value_count: 2 } - match: { hits.hits.0._source.k8s.pod.network.tx.min: 2001818691 } - match: { hits.hits.0._source.k8s.pod.network.tx.max: 2005177954 } - match: { hits.hits.0._source.k8s.pod.network.tx.value_count: 2 } @@ -354,6 +367,13 @@ setup: - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.type: long } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.multi-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.type: scaled_float } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.scaling_factor: 100 } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-counter.time_series_metric: counter } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.type: aggregate_metric_double } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.metrics: [ "min", "max", "sum", "value_count" ] } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.default_metric: max } + - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.scaled-gauge.time_series_metric: gauge } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.type: keyword } - match: { test-downsample.mappings.properties.k8s.properties.pod.properties.uid.time_series_dimension: true } @@ -387,6 +407,38 @@ setup: "fixed_interval": "1h" } +--- +"Downsample failure": + - skip: + version: " - 8.12.99" + reason: "#103615 merged to 8.13.0 and later" + features: allowed_warnings + + - do: + allowed_warnings: + - "index template [my-template1] has index patterns [failed-downsample-test] matching patterns from existing older templates [global] with patterns (global => [*]); this template [my-template1] will take precedence during new index creation" + indices.put_index_template: + name: my-template1 + body: + index_patterns: [failed-downsample-test] + template: + settings: + index: + routing: + allocation: + include: + does-not-exist: "yes" + + - do: + catch: /downsample task \[downsample-failed-downsample-test-0-1h\] failed/ + indices.downsample: + index: test + target_index: failed-downsample-test + body: > + { + "fixed_interval": "1h" + } + --- "Downsample to existing index": - skip: diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java index 826c958de4c18..f248da8a7842a 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DataStreamLifecycleDownsampleDisruptionIT.java @@ -126,11 +126,7 @@ public boolean validateClusterForming() { } })).start(); - waitUntil( - () -> cluster.client().admin().cluster().preparePendingClusterTasks().get().pendingTasks().isEmpty(), - 60, - TimeUnit.SECONDS - ); + waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty(), 60, TimeUnit.SECONDS); 
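The mapping assertions above pin down how downsampling treats the new scaled_float fields: a scaled_float counter keeps its source type and scaling_factor in the target index, while a scaled_float gauge is rewritten to aggregate_metric_double with max as the default metric. A sketch of emitting that gauge target mapping with XContentBuilder, mirroring the asserted values (the field name comes from the test):

import org.elasticsearch.xcontent.XContentBuilder;
import org.elasticsearch.xcontent.XContentFactory;

import java.io.IOException;

final class DownsampleGaugeMappingSketch {
    static XContentBuilder scaledGaugeTargetMapping() throws IOException {
        // Gauges are downsampled into aggregate_metric_double; counters would
        // instead copy their source field properties unchanged.
        return XContentFactory.jsonBuilder()
            .startObject()
            .startObject("scaled-gauge")
            .field("type", "aggregate_metric_double")
            .array("metrics", "min", "max", "sum", "value_count")
            .field("default_metric", "max")
            .field("time_series_metric", "gauge")
            .endObject()
            .endObject();
    }
}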
ensureStableCluster(cluster.numDataAndMasterNodes()); final String targetIndex = "downsample-5m-" + sourceIndex; diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java index d6549a9618d36..30066e21e4960 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/DownsampleClusterDisruptionIT.java @@ -203,7 +203,7 @@ public boolean validateClusterForming() { } })).start(); startDownsampleTaskDuringDisruption(sourceIndex, targetIndex, config, disruptionStart, disruptionEnd); - waitUntil(() -> cluster.client().admin().cluster().preparePendingClusterTasks().get().pendingTasks().isEmpty()); + waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty()); ensureStableCluster(cluster.numDataAndMasterNodes()); assertTargetIndex(cluster, sourceIndex, targetIndex, indexedDocs); } @@ -265,7 +265,7 @@ public boolean validateClusterForming() { })).start(); startDownsampleTaskDuringDisruption(sourceIndex, targetIndex, config, disruptionStart, disruptionEnd); - waitUntil(() -> cluster.client().admin().cluster().preparePendingClusterTasks().get().pendingTasks().isEmpty()); + waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty()); ensureStableCluster(cluster.numDataAndMasterNodes()); assertTargetIndex(cluster, sourceIndex, targetIndex, indexedDocs); } @@ -354,7 +354,7 @@ public boolean validateClusterForming() { })).start(); startDownsampleTaskDuringDisruption(sourceIndex, downsampleIndex, config, disruptionStart, disruptionEnd); - waitUntil(() -> cluster.client().admin().cluster().preparePendingClusterTasks().get().pendingTasks().isEmpty()); + waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty()); ensureStableCluster(cluster.numDataAndMasterNodes()); assertTargetIndex(cluster, sourceIndex, downsampleIndex, indexedDocs); } @@ -429,7 +429,7 @@ private void downsample(final String sourceIndex, final String downsampleIndex, assertAcked( internalCluster().client() .execute(DownsampleAction.INSTANCE, new DownsampleAction.Request(sourceIndex, downsampleIndex, TIMEOUT, config)) - .actionGet(TIMEOUT.millis()) + .actionGet(TIMEOUT) ); } diff --git a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java index a023f171ad209..0e84ed460cf5d 100644 --- a/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java +++ b/x-pack/plugin/downsample/src/internalClusterTest/java/org/elasticsearch/xpack/downsample/ILMDownsampleDisruptionIT.java @@ -194,7 +194,7 @@ public boolean validateClusterForming() { final String targetIndex = "downsample-1h-" + sourceIndex; startDownsampleTaskViaIlm(sourceIndex, targetIndex, disruptionStart, disruptionEnd); - waitUntil(() -> cluster.client().admin().cluster().preparePendingClusterTasks().get().pendingTasks().isEmpty()); + waitUntil(() -> getClusterPendingTasks(cluster.client()).pendingTasks().isEmpty(), 60, TimeUnit.SECONDS); ensureStableCluster(cluster.numDataAndMasterNodes()); 
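These disruption tests swap the removed client().admin().cluster().preparePendingClusterTasks() request builder for a shared getClusterPendingTasks helper. Assuming the helper simply executes the pending-cluster-tasks action directly, it presumably amounts to the following sketch; the action and request class names are an assumption, since the helper's definition is not part of this diff.

import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksRequest;
import org.elasticsearch.action.admin.cluster.tasks.PendingClusterTasksResponse;
import org.elasticsearch.action.admin.cluster.tasks.TransportPendingClusterTasksAction;
import org.elasticsearch.client.internal.Client;

final class PendingTasksSketch {
    static PendingClusterTasksResponse getClusterPendingTasks(Client client) {
        // Execute the action directly rather than via the removed request builder
        return client.execute(TransportPendingClusterTasksAction.TYPE, new PendingClusterTasksRequest()).actionGet();
    }
}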
assertTargetIndex(cluster, targetIndex, indexedDocs); } diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java index 06e69ab4702c1..ebf31bd32b48f 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardPersistentTaskExecutor.java @@ -159,12 +159,13 @@ private void delegate( final DownsampleShardTaskParams params, final SearchHit[] lastDownsampledTsidHits ) { + DownsampleShardTask downsampleShardTask = (DownsampleShardTask) task; client.execute( DelegatingAction.INSTANCE, - new DelegatingAction.Request((DownsampleShardTask) task, lastDownsampledTsidHits, params), + new DelegatingAction.Request(downsampleShardTask, lastDownsampledTsidHits, params), ActionListener.wrap(empty -> {}, e -> { LOGGER.error("error while delegating", e); - markAsFailed(task, e); + markAsFailed(downsampleShardTask, e); }) ); } @@ -222,7 +223,8 @@ protected void doRun() throws Exception { }); } - private static void markAsFailed(AllocatedPersistentTask task, Exception e) { + private static void markAsFailed(DownsampleShardTask task, Exception e) { + task.setDownsampleShardIndexerStatus(DownsampleShardIndexerStatus.FAILED); task.updatePersistentTaskState( new DownsampleShardPersistentTaskState(DownsampleShardIndexerStatus.FAILED, null), ActionListener.running(() -> task.markAsFailed(e)) diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java index 6a4ee88a0cdef..34b7d3c90b267 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/DownsampleShardTaskParams.java @@ -91,7 +91,7 @@ public String getWriteableName() { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_054; + return TransportVersions.V_8_500_061; } @Override diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java index 76f19388e7ee7..8324265c3a786 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/RestDownsampleAction.java @@ -35,7 +35,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient String sourceIndex = restRequest.param("index"); String targetIndex = restRequest.param("target_index"); String timeout = restRequest.param("timeout"); - DownsampleConfig config = DownsampleConfig.fromXContent(restRequest.contentParser()); + DownsampleConfig config; + try (var parser = restRequest.contentParser()) { + config = DownsampleConfig.fromXContent(parser); + } DownsampleAction.Request request = new DownsampleAction.Request( sourceIndex, targetIndex, diff --git a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java 
b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java index 4dc5195f8345a..e7bd2f0c0fb27 100644 --- a/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java +++ b/x-pack/plugin/downsample/src/main/java/org/elasticsearch/xpack/downsample/TransportDownsampleAction.java @@ -400,6 +400,19 @@ private void performShardDownsampling( @Override public void onResponse(PersistentTasksCustomMetadata.PersistentTask persistentTask) { + if (persistentTask != null) { + var runningPersistentTaskState = (DownsampleShardPersistentTaskState) persistentTask.getState(); + if (runningPersistentTaskState != null) { + if (runningPersistentTaskState.failed()) { + onFailure(new ElasticsearchException("downsample task [" + persistentTaskId + "] failed")); + return; + } else if (runningPersistentTaskState.cancelled()) { + onFailure(new ElasticsearchException("downsample task [" + persistentTaskId + "] cancelled")); + return; + } + } + } + logger.info("Downsampling task [" + persistentTaskId + "] completed for shard " + params.shardId()); if (countDown.decrementAndGet() == 0) { logger.info("All downsampling tasks completed [" + numberOfShards + "]"); @@ -598,21 +611,23 @@ private static void addMetricFieldMapping(final XContentBuilder builder, final S final TimeSeriesParams.MetricType metricType = TimeSeriesParams.MetricType.fromString( fieldProperties.get(TIME_SERIES_METRIC_PARAM).toString() ); + builder.startObject(field); if (metricType == TimeSeriesParams.MetricType.COUNTER) { // For counters, we keep the same field type, because they store // only one value (the last value of the counter) - builder.startObject(field).field("type", fieldProperties.get("type")).field(TIME_SERIES_METRIC_PARAM, metricType).endObject(); + for (String fieldProperty : fieldProperties.keySet()) { + builder.field(fieldProperty, fieldProperties.get(fieldProperty)); + } } else { final String[] supportedAggsArray = metricType.supportedAggs(); // We choose max as the default metric final String defaultMetric = List.of(supportedAggsArray).contains("max") ?
"max" : supportedAggsArray[0]; - builder.startObject(field) - .field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE) + builder.field("type", AggregateDoubleMetricFieldMapper.CONTENT_TYPE) .array(AggregateDoubleMetricFieldMapper.Names.METRICS, supportedAggsArray) .field(AggregateDoubleMetricFieldMapper.Names.DEFAULT_METRIC, defaultMetric) - .field(TIME_SERIES_METRIC_PARAM, metricType) - .endObject(); + .field(TIME_SERIES_METRIC_PARAM, metricType); } + builder.endObject(); } private static void validateDownsamplingInterval(MapperService mapperService, DownsampleConfig config) { diff --git a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java index df8ea5344708d..94e9033dcca4f 100644 --- a/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java +++ b/x-pack/plugin/enrich/src/main/java/org/elasticsearch/xpack/enrich/action/EnrichShardMultiSearchAction.java @@ -55,7 +55,6 @@ import org.elasticsearch.search.SearchModule; import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xcontent.DeprecationHandler; @@ -305,7 +304,13 @@ private static BytesReference filterSource(FetchSourceContext fetchSourceContext private static SearchResponse createSearchResponse(TopDocs topDocs, SearchHit[] hits) { SearchHits searchHits = new SearchHits(hits, topDocs.totalHits, 0); return new SearchResponse( - new InternalSearchResponse(searchHits, null, null, null, false, null, 0), + searchHits, + null, + null, + false, + null, + null, + 0, null, 1, 1, diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java index a26cab231f52c..5fa8659b609b1 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichProcessorFactoryTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; @@ -257,26 +256,26 @@ protected void ActionListener listener ) { assert EnrichCoordinatorProxyAction.NAME.equals(action.name()); - var emptyResponse = new SearchResponse( - new InternalSearchResponse( + requestCounter[0]++; + ActionListener.respondAndRelease( + listener, + (Response) new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), InternalAggregations.EMPTY, new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), false, false, - 1 - ), - "", - 1, - 1, - 0, - 0, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY + new SearchProfileResults(Collections.emptyMap()), + 1, + "", + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + 
SearchResponse.Clusters.EMPTY + ) ); - requestCounter[0]++; - listener.onResponse((Response) emptyResponse); } }; EnrichProcessorFactory factory = new EnrichProcessorFactory(client, scriptService, enrichCache); diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java index 049684a2c778d..7645760be9a10 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/EnrichResiliencyTests.java @@ -34,6 +34,7 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -153,7 +154,7 @@ public void testWriteThreadLivenessBackToBack() throws Exception { assertThat(firstFailure.getMessage(), containsString("Could not perform enrichment, enrich coordination queue at capacity")); client().admin().indices().refresh(new RefreshRequest(enrichedIndexName)).actionGet(); - assertEquals(successfulItems, client().search(new SearchRequest(enrichedIndexName)).actionGet().getHits().getTotalHits().value); + assertHitCount(client().search(new SearchRequest(enrichedIndexName)), successfulItems); } public void testWriteThreadLivenessWithPipeline() throws Exception { @@ -276,6 +277,6 @@ public void testWriteThreadLivenessWithPipeline() throws Exception { assertThat(firstFailure.getMessage(), containsString("Could not perform enrichment, enrich coordination queue at capacity")); client().admin().indices().refresh(new RefreshRequest(enrichedIndexName)).actionGet(); - assertEquals(successfulItems, client().search(new SearchRequest(enrichedIndexName)).actionGet().getHits().getTotalHits().value); + assertHitCount(client().search(new SearchRequest(enrichedIndexName)), successfulItems); } } diff --git a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java index 079af561e00c9..8f23dde1d939f 100644 --- a/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java +++ b/x-pack/plugin/enrich/src/test/java/org/elasticsearch/xpack/enrich/action/CoordinatorTests.java @@ -25,7 +25,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -373,16 +372,22 @@ public void testReduce() { } private static SearchResponse emptySearchResponse() { - InternalSearchResponse response = new InternalSearchResponse( + return new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), InternalAggregations.EMPTY, null, - null, false, null, - 1 + null, + 1, + null, + 1, + 1, + 0, + 100, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY ); - return new SearchResponse(response, null, 1, 1, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); } private class MockLookupFunction implements BiConsumer> { diff 
--git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml index 260e1784d29e2..5a012853b4bf9 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/335_connector_update_configuration.yml @@ -85,6 +85,41 @@ setup: - match: { configuration.some_field.value: 456 } - match: { status: configured } +--- +"Update Connector Configuration with null tooltip": + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: null + type: str + ui_restrictions: [ ] + validations: + - constraint: 0 + type: greater_than + value: 123 + + + - match: { result: updated } + + - do: + connector.get: + connector_id: test-connector + + - match: { configuration.some_field.tooltip: null } + --- "Update Connector Configuration - Connector doesn't exist": - do: diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml index 0403842cb0728..582a523605663 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/400_connector_sync_job_post.yml @@ -41,8 +41,68 @@ setup: - exists: created_at - exists: last_seen +--- +'Create connector sync job with complex connector document': + + - do: + connector.update_pipeline: + connector_id: test-connector + body: + pipeline: + extract_binary_content: true + name: test-pipeline + reduce_whitespace: true + run_ml_inference: false + + - match: { result: updated } + + - do: + connector.update_configuration: + connector_id: test-connector + body: + configuration: + some_field: + default_value: null + depends_on: + - field: some_field + value: 31 + display: numeric + label: Very important field + options: [ ] + order: 4 + required: true + sensitive: false + tooltip: Wow, this tooltip is useful. 
+              type: str
+              ui_restrictions: [ ]
+              validations:
+                - constraint: 0
+                  type: greater_than
+              value: 456
+
+  - match: { result: updated }
+
+  - do:
+      connector_sync_job.post:
+        body:
+          id: test-connector
+          job_type: full
+          trigger_method: on_demand
+
+  - set: { id: id }
+
+  - match: { id: $id }
+
+  - do:
+      connector_sync_job.get:
+        connector_sync_job_id: $id
+
+  - match: { connector.id: test-connector }
+  - match: { connector.configuration.some_field.value: 456 }
+  - match: { connector.pipeline.name: test-pipeline }
 ---
+
 'Create connector sync job with missing job type - expect job type full as default':
   - do:
       connector_sync_job.post:
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml
index d08f7f6a51c91..caa6543b6985a 100644
--- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml
+++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/420_connector_sync_job_check_in.yml
@@ -34,7 +34,7 @@ setup:
       connector_sync_job.check_in:
         connector_sync_job_id: $sync-job-id-to-check-in

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml
index d934b7c674f25..633c1a8cecb7b 100644
--- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml
+++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/430_connector_sync_job_cancel.yml
@@ -27,7 +27,7 @@ setup:
       connector_sync_job.cancel:
         connector_sync_job_id: $sync-job-id-to-cancel

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml
index 6f525a2ac2883..a565d28c3e788 100644
--- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml
+++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/450_connector_sync_job_error.yml
@@ -29,7 +29,7 @@ setup:
         body:
           error: error

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml
index 0c7300bd2b436..94572870f9164 100644
--- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml
+++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/460_connector_sync_job_update_stats.yml
@@ -32,7 +32,7 @@ setup:
           indexed_document_count: 20
           indexed_document_volume: 1000

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
@@ -143,7 +143,7 @@ setup:
           indexed_document_volume: 1000
           total_document_count: 20

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
@@ -173,7 +173,7 @@ setup:
           indexed_document_volume: 1000
           last_seen: 2023-12-04T08:45:50.567149Z

-  - match: { acknowledged: true }
+  - match: { result: updated }

   - do:
       connector_sync_job.get:
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java
index a1446606a21af..d9f433b8052bf 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java
@@ -65,11 +65,8 @@ public class AnalyticsTemplateRegistry extends IndexTemplateRegistry {
                 TEMPLATE_VERSION_VARIABLE
             )
         )) {
-            try {
-                componentTemplates.put(
-                    config.getTemplateName(),
-                    ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes()))
-                );
+            try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) {
+                componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser));
             } catch (IOException e) {
                 throw new AssertionError(e);
             }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java
index 11c5a44d45977..74d9be8db0fac 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/Connector.java
@@ -153,7 +153,7 @@ private Connector(
         this.error = error;
         this.features = features;
         this.filtering = Objects.requireNonNull(filtering, "[filtering] cannot be null");
-        this.indexName = Objects.requireNonNull(indexName, "[index_name] cannot be null");
+        this.indexName = indexName;
         this.isNative = isNative;
         this.language = language;
         this.lastSeen = lastSeen;
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java
index 103c647f180b4..8ed7c417a1af1 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorConfiguration.java
@@ -54,6 +54,7 @@ public class ConnectorConfiguration implements Writeable, ToXContentObject {
     private final String placeholder;
     private final boolean required;
     private final boolean sensitive;
+    @Nullable
     private final String tooltip;
     private final ConfigurationFieldType type;
     private final List<String> uiRestrictions;
@@ -199,7 +200,7 @@ public ConnectorConfiguration(StreamInput in) throws IOException {
         PARSER.declareString(optionalConstructorArg(), PLACEHOLDER_FIELD);
         PARSER.declareBoolean(constructorArg(), REQUIRED_FIELD);
         PARSER.declareBoolean(constructorArg(), SENSITIVE_FIELD);
-        PARSER.declareStringOrNull(constructorArg(), TOOLTIP_FIELD);
+        PARSER.declareStringOrNull(optionalConstructorArg(), TOOLTIP_FIELD);
         PARSER.declareField(
             constructorArg(),
             (p, c) -> ConfigurationFieldType.fieldType(p.text()),
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java
index 642295061d17a..c57650541b416 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/ConnectorTemplateRegistry.java
@@ -98,11 +98,8 @@ public class ConnectorTemplateRegistry extends IndexTemplateRegistry {
             )
         )) {
-            try {
-                componentTemplates.put(
-                    config.getTemplateName(),
-                    ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes()))
-                );
+            try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) {
+                componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser));
             } catch (IOException e) {
                 throw new AssertionError(e);
             }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponse.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponse.java
new file mode 100644
index 0000000000000..b77f04e7d9289
--- /dev/null
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponse.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.application.connector.action;
+
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.DocWriteResponse;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.xcontent.ToXContentObject;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.application.connector.Connector;
+import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob;
+
+import java.io.IOException;
+import java.util.Objects;
+
+/**
+ * Represents a response for update actions related to {@link Connector} and {@link ConnectorSyncJob}.
+ * The response encapsulates the result of the update action, represented by a {@link DocWriteResponse.Result}.
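+ *
+ * <p>For illustration only (not part of any production code path below): a response wrapping an
+ * {@code UPDATED} result serializes as {@code {"result": "updated"}} and maps to HTTP 200, while
+ * {@code NOT_FOUND} maps to 404.
+ * <pre>{@code
+ * ConnectorUpdateActionResponse updated = new ConnectorUpdateActionResponse(DocWriteResponse.Result.UPDATED);
+ * assert updated.status() == RestStatus.OK;
+ * assert new ConnectorUpdateActionResponse(DocWriteResponse.Result.NOT_FOUND).status() == RestStatus.NOT_FOUND;
+ * }</pre>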
+ */
+public class ConnectorUpdateActionResponse extends ActionResponse implements ToXContentObject {
+    final DocWriteResponse.Result result;
+
+    public ConnectorUpdateActionResponse(StreamInput in) throws IOException {
+        super(in);
+        result = DocWriteResponse.Result.readFrom(in);
+    }
+
+    public ConnectorUpdateActionResponse(DocWriteResponse.Result result) {
+        this.result = result;
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        this.result.writeTo(out);
+    }
+
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+        builder.startObject();
+        builder.field("result", this.result.getLowercase());
+        builder.endObject();
+        return builder;
+    }
+
+    public RestStatus status() {
+        return switch (result) {
+            case NOT_FOUND -> RestStatus.NOT_FOUND;
+            default -> RestStatus.OK;
+        };
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (this == o) return true;
+        if (o == null || getClass() != o.getClass()) return false;
+        ConnectorUpdateActionResponse that = (ConnectorUpdateActionResponse) o;
+        return Objects.equals(result, that.result);
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(result);
+    }
+}
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java
index aa46353d47999..12c96d212f77a 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorConfigurationAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorConfigurationAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorConfigurationAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java
index ea8bd1b4ee50f..8b4b70b994ec1 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorErrorAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorErrorAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorErrorAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java
index 63ae3e81fe563..4908e9e09d73f 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorFilteringAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorFilteringAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorFilteringAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java
index b2ebaa74984b1..c2c6ee12a7767 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSeenAction.java
@@ -35,7 +35,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorLastSeenAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorLastSeenAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java
index 8e373ce48caf3..ff3ba53e34a9d 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorLastSyncStatsAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorLastSyncStatsAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorLastSyncStatsAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java
index 54ce2c9af79e8..c51744e57b1df 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorNameAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorNameAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorNameAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java
index ba83bd42dac11..8192099b832dd 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorPipelineAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorPipelineAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorPipelineAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java
index 06a6cb527544e..fda9fa03af913 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/RestUpdateConnectorSchedulingAction.java
@@ -39,7 +39,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient
         return channel -> client.execute(
             UpdateConnectorSchedulingAction.INSTANCE,
             request,
-            new RestToXContentListener<>(channel, UpdateConnectorSchedulingAction.Response::status, r -> null)
+            new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status)
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java
index 211c3b5a3a670..d4a7e0cf58df2 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorConfigurationAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorConfigurationAction extends HandledTransportAction<
     UpdateConnectorConfigurationAction.Request,
-    UpdateConnectorConfigurationAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorConfigurationAction(
     protected void doExecute(
         Task task,
         UpdateConnectorConfigurationAction.Request request,
-        ActionListener<UpdateConnectorConfigurationAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorConfiguration(
-            request,
-            listener.map(r -> new UpdateConnectorConfigurationAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorConfiguration(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java
index 629fd14861cf6..5d9be2ec93f45 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorErrorAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorErrorAction extends HandledTransportAction<
     UpdateConnectorErrorAction.Request,
-    UpdateConnectorErrorAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,8 +45,8 @@ public TransportUpdateConnectorErrorAction(
     protected void doExecute(
         Task task,
         UpdateConnectorErrorAction.Request request,
-        ActionListener<UpdateConnectorErrorAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorError(request, listener.map(r -> new UpdateConnectorErrorAction.Response(r.getResult())));
+        connectorIndexService.updateConnectorError(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java
index e871eb4bb79e5..658a8075121af 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorFilteringAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorFilteringAction extends HandledTransportAction<
     UpdateConnectorFilteringAction.Request,
-    UpdateConnectorFilteringAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorFilteringAction(
     protected void doExecute(
         Task task,
         UpdateConnectorFilteringAction.Request request,
-        ActionListener<UpdateConnectorFilteringAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorFiltering(
-            request,
-            listener.map(r -> new UpdateConnectorFilteringAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorFiltering(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java
index 3d3d2c9ee04b7..60c75bce8314a 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSeenAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorLastSeenAction extends HandledTransportAction<
     UpdateConnectorLastSeenAction.Request,
-    UpdateConnectorLastSeenAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorLastSeenAction(
     protected void doExecute(
         Task task,
         UpdateConnectorLastSeenAction.Request request,
-        ActionListener<UpdateConnectorLastSeenAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorLastSeen(
-            request,
-            listener.map(r -> new UpdateConnectorLastSeenAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorLastSeen(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java
index 9ec0105668fbc..ad934b04c772e 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorLastSyncStatsAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorLastSyncStatsAction extends HandledTransportAction<
     UpdateConnectorLastSyncStatsAction.Request,
-    UpdateConnectorLastSyncStatsAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorLastSyncStatsAction(
     protected void doExecute(
         Task task,
         UpdateConnectorLastSyncStatsAction.Request request,
-        ActionListener<UpdateConnectorLastSyncStatsAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorLastSyncStats(
-            request,
-            listener.map(r -> new UpdateConnectorLastSyncStatsAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorLastSyncStats(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java
index 252734aab1c51..db79ed7be2836 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorNameAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorNameAction extends HandledTransportAction<
     UpdateConnectorNameAction.Request,
-    UpdateConnectorNameAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -42,14 +42,10 @@ public TransportUpdateConnectorNameAction(
     }

     @Override
-    protected void doExecute(
-        Task task,
-        UpdateConnectorNameAction.Request request,
-        ActionListener<UpdateConnectorNameAction.Response> listener
-    ) {
+    protected void doExecute(Task task, UpdateConnectorNameAction.Request request, ActionListener<ConnectorUpdateActionResponse> listener) {
         connectorIndexService.updateConnectorNameOrDescription(
             request,
-            listener.map(r -> new UpdateConnectorNameAction.Response(r.getResult()))
+            listener.map(r -> new ConnectorUpdateActionResponse(r.getResult()))
         );
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java
index c54d3db1215bc..11862d568c4ff 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorPipelineAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorPipelineAction extends HandledTransportAction<
     UpdateConnectorPipelineAction.Request,
-    UpdateConnectorPipelineAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorPipelineAction(
     protected void doExecute(
         Task task,
         UpdateConnectorPipelineAction.Request request,
-        ActionListener<UpdateConnectorPipelineAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorPipeline(
-            request,
-            listener.map(r -> new UpdateConnectorPipelineAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorPipeline(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java
index 186edb2328f38..bb5e1f858bd94 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/TransportUpdateConnectorSchedulingAction.java
@@ -20,7 +20,7 @@
 public class TransportUpdateConnectorSchedulingAction extends HandledTransportAction<
     UpdateConnectorSchedulingAction.Request,
-    UpdateConnectorSchedulingAction.Response> {
+    ConnectorUpdateActionResponse> {

     protected final ConnectorIndexService connectorIndexService;
@@ -45,11 +45,8 @@ public TransportUpdateConnectorSchedulingAction(
     protected void doExecute(
         Task task,
         UpdateConnectorSchedulingAction.Request request,
-        ActionListener<UpdateConnectorSchedulingAction.Response> listener
+        ActionListener<ConnectorUpdateActionResponse> listener
     ) {
-        connectorIndexService.updateConnectorScheduling(
-            request,
-            listener.map(r -> new UpdateConnectorSchedulingAction.Response(r.getResult()))
-        );
+        connectorIndexService.updateConnectorScheduling(request, listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())));
     }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java
index 6b5f52f3afda7..19e7628746485 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationAction.java
@@ -10,15 +10,12 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
@@ -37,13 +34,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class UpdateConnectorConfigurationAction extends ActionType<UpdateConnectorConfigurationAction.Response> {
+public class UpdateConnectorConfigurationAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorConfigurationAction INSTANCE = new UpdateConnectorConfigurationAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_configuration";

     public UpdateConnectorConfigurationAction() {
-        super(NAME, UpdateConnectorConfigurationAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -152,51 +149,4 @@ public int hashCode() {
             return Objects.hash(connectorId, configuration);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java
index c9e48dac08cd5..ad2036ecbaf81 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorAction.java
@@ -10,16 +10,13 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -34,13 +31,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class UpdateConnectorErrorAction extends ActionType<UpdateConnectorErrorAction.Response> {
+public class UpdateConnectorErrorAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorErrorAction INSTANCE = new UpdateConnectorErrorAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_error";

     public UpdateConnectorErrorAction() {
-        super(NAME, UpdateConnectorErrorAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -136,51 +133,4 @@ public int hashCode() {
             return Objects.hash(connectorId, error);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java
index 68c644cb9d9db..dabb87f2afc22 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringAction.java
@@ -10,15 +10,12 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -35,13 +32,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

-public class UpdateConnectorFilteringAction extends ActionType<UpdateConnectorFilteringAction.Response> {
+public class UpdateConnectorFilteringAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorFilteringAction INSTANCE = new UpdateConnectorFilteringAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_filtering";

     public UpdateConnectorFilteringAction() {
-        super(NAME, UpdateConnectorFilteringAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -141,51 +138,4 @@ public int hashCode() {
             return Objects.hash(connectorId, filtering);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java
index 976be76ba84af..bd20513e47033 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenAction.java
@@ -9,13 +9,10 @@
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
 import org.elasticsearch.xpack.application.connector.Connector;
@@ -26,13 +23,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;

-public class UpdateConnectorLastSeenAction extends ActionType<UpdateConnectorLastSeenAction.Response> {
+public class UpdateConnectorLastSeenAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorLastSeenAction INSTANCE = new UpdateConnectorLastSeenAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_last_seen";

     public UpdateConnectorLastSeenAction() {
-        super(NAME, UpdateConnectorLastSeenAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -97,51 +94,4 @@ public int hashCode() {
             return Objects.hash(connectorId, lastSeen);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java
index 328831cf0b840..7d82c28ca4af1 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsAction.java
@@ -10,15 +10,12 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
@@ -36,13 +33,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class UpdateConnectorLastSyncStatsAction extends ActionType<UpdateConnectorLastSyncStatsAction.Response> {
+public class UpdateConnectorLastSyncStatsAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorLastSyncStatsAction INSTANCE = new UpdateConnectorLastSyncStatsAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_last_sync_stats";

     public UpdateConnectorLastSyncStatsAction() {
-        super(NAME, UpdateConnectorLastSyncStatsAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -190,51 +187,4 @@ public int hashCode() {
             return Objects.hash(connectorId, syncInfo);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java
index 1db9bbe3aad9d..6b5c580e396ad 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameAction.java
@@ -10,16 +10,13 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
 import org.elasticsearch.core.Nullable;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -35,13 +32,13 @@
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg;

-public class UpdateConnectorNameAction extends ActionType<UpdateConnectorNameAction.Response> {
+public class UpdateConnectorNameAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorNameAction INSTANCE = new UpdateConnectorNameAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_name";

     public UpdateConnectorNameAction() {
-        super(NAME, UpdateConnectorNameAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -159,51 +156,4 @@ public int hashCode() {
             return Objects.hash(connectorId, name, description);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java
index 68babb2d4b517..ba5b0e702bf0e 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineAction.java
@@ -10,15 +10,12 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -34,13 +31,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

-public class UpdateConnectorPipelineAction extends ActionType<UpdateConnectorPipelineAction.Response> {
+public class UpdateConnectorPipelineAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorPipelineAction INSTANCE = new UpdateConnectorPipelineAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_pipeline";

     public UpdateConnectorPipelineAction() {
-        super(NAME, UpdateConnectorPipelineAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -139,52 +136,4 @@ public int hashCode() {
             return Objects.hash(connectorId, pipeline);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
-            builder.field("result", this.result.getLowercase());
-            builder.endObject();
-            return builder;
-        }
-
-        public RestStatus status() {
-            return switch (result) {
-                case NOT_FOUND -> RestStatus.NOT_FOUND;
-                default -> RestStatus.OK;
-            };
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) return true;
-            if (o == null || getClass() != o.getClass()) return false;
-            Response that = (Response) o;
-            return Objects.equals(result, that.result);
-        }
-
-        @Override
-        public int hashCode() {
-            return Objects.hash(result);
-        }
-
-    }
 }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
index 9867830c5d211..df76e9a09547a 100644
--- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
+++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingAction.java
@@ -10,15 +10,12 @@
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.action.ActionRequest;
 import org.elasticsearch.action.ActionRequestValidationException;
-import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.action.ActionType;
-import org.elasticsearch.action.DocWriteResponse;
 import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.xcontent.XContentHelper;
-import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.xcontent.ConstructingObjectParser;
 import org.elasticsearch.xcontent.ToXContentObject;
 import org.elasticsearch.xcontent.XContentBuilder;
@@ -34,13 +31,13 @@
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg;

-public class UpdateConnectorSchedulingAction extends ActionType<UpdateConnectorSchedulingAction.Response> {
+public class UpdateConnectorSchedulingAction extends ActionType<ConnectorUpdateActionResponse> {

     public static final UpdateConnectorSchedulingAction INSTANCE = new UpdateConnectorSchedulingAction();
     public static final String NAME = "cluster:admin/xpack/connector/update_scheduling";

     public UpdateConnectorSchedulingAction() {
-        super(NAME, UpdateConnectorSchedulingAction.Response::new);
+        super(NAME, ConnectorUpdateActionResponse::new);
     }

     public static class Request extends ActionRequest implements ToXContentObject {
@@ -139,51 +136,4 @@ public int hashCode() {
             return Objects.hash(connectorId, scheduling);
         }
     }
-
-    public static class Response extends ActionResponse implements ToXContentObject {
-
-        final DocWriteResponse.Result result;
-
-        public Response(StreamInput in) throws IOException {
-            super(in);
-            result = DocWriteResponse.Result.readFrom(in);
-        }
-
-        public Response(DocWriteResponse.Result result) {
-            this.result = result;
-        }
-
-        @Override
-        public void writeTo(StreamOutput out) throws IOException {
-            this.result.writeTo(out);
-        }
-
-        @Override
-        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-            builder.startObject();
builder.field("result", this.result.getLowercase()); - builder.endObject(); - return builder; - } - - public RestStatus status() { - return switch (result) { - case NOT_FOUND -> RestStatus.NOT_FOUND; - default -> RestStatus.OK; - }; - } - - @Override - public boolean equals(Object o) { - if (this == o) return true; - if (o == null || getClass() != o.getClass()) return false; - Response that = (Response) o; - return Objects.equals(result, that.result); - } - - @Override - public int hashCode() { - return Objects.hash(result); - } - } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java index d6b3d83d705b9..df8dee04d61b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/configuration/ConfigurationDisplayType.java @@ -10,6 +10,7 @@ import java.util.Locale; public enum ConfigurationDisplayType { + TEXT, TEXTBOX, TEXTAREA, NUMERIC, diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java index c63cb1921adc6..84d91b7fe0f08 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJob.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.application.connector.syncjob; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -31,6 +32,7 @@ import java.io.IOException; import java.time.Instant; import java.util.Collections; +import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; @@ -281,7 +283,7 @@ public ConnectorSyncJob(StreamInput in) throws IOException { ); PARSER.declareField( constructorArg(), - (p, c) -> ConnectorSyncJob.syncJobConnectorFromXContent(p), + (p, c) -> ConnectorSyncJob.syncJobConnectorFromXContent(p, null), CONNECTOR_FIELD, ObjectParser.ValueType.OBJECT ); @@ -326,12 +328,21 @@ private static Instant parseNullableInstant(XContentParser p) throws IOException } @SuppressWarnings("unchecked") - private static final ConstructingObjectParser SYNC_JOB_CONNECTOR_PARSER = new ConstructingObjectParser<>( + private static final ConstructingObjectParser SYNC_JOB_CONNECTOR_PARSER = new ConstructingObjectParser<>( "sync_job_connector", true, - (args) -> { + (args, connectorId) -> { int i = 0; - return new Connector.Builder().setConnectorId((String) args[i++]) + + // Parse the connector ID from the arguments. The ID uniquely identifies the connector. + String parsedConnectorId = (String) args[i++]; + + // Determine the actual connector ID to use. If the context parameter `connectorId` is not null or empty, + // it takes precedence over the `parsedConnectorId` extracted from the arguments. 
+            // This approach allows for flexibility in specifying the connector ID, either from a context or as a parsed argument.
+            String syncJobConnectorId = Strings.isNullOrEmpty(connectorId) ? parsedConnectorId : connectorId;
+
+            return new Connector.Builder().setConnectorId(syncJobConnectorId)
                 .setFiltering((List<ConnectorFiltering>) args[i++])
                 .setIndexName((String) args[i++])
                 .setLanguage((String) args[i++])
@@ -343,26 +354,25 @@ private static Instant parseNullableInstant(XContentParser p) throws IOException
     );

     static {
-        SYNC_JOB_CONNECTOR_PARSER.declareString(constructorArg(), Connector.ID_FIELD);
+        SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.ID_FIELD);
         SYNC_JOB_CONNECTOR_PARSER.declareObjectArray(
             optionalConstructorArg(),
             (p, c) -> ConnectorFiltering.fromXContent(p),
             Connector.FILTERING_FIELD
         );
-        SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.INDEX_NAME_FIELD);
-        SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.LANGUAGE_FIELD);
-        SYNC_JOB_CONNECTOR_PARSER.declareField(
+        SYNC_JOB_CONNECTOR_PARSER.declareStringOrNull(optionalConstructorArg(), Connector.INDEX_NAME_FIELD);
+        SYNC_JOB_CONNECTOR_PARSER.declareStringOrNull(optionalConstructorArg(), Connector.LANGUAGE_FIELD);
+        SYNC_JOB_CONNECTOR_PARSER.declareObjectOrNull(
             optionalConstructorArg(),
             (p, c) -> ConnectorIngestPipeline.fromXContent(p),
-            Connector.PIPELINE_FIELD,
-            ObjectParser.ValueType.OBJECT
+            null,
+            Connector.PIPELINE_FIELD
         );
-        SYNC_JOB_CONNECTOR_PARSER.declareString(optionalConstructorArg(), Connector.SERVICE_TYPE_FIELD);
-        SYNC_JOB_CONNECTOR_PARSER.declareField(
+        SYNC_JOB_CONNECTOR_PARSER.declareStringOrNull(optionalConstructorArg(), Connector.SERVICE_TYPE_FIELD);
+        SYNC_JOB_CONNECTOR_PARSER.declareObject(
             optionalConstructorArg(),
-            (parser, context) -> parser.map(),
-            Connector.CONFIGURATION_FIELD,
-            ObjectParser.ValueType.OBJECT
+            (p, c) -> p.map(HashMap::new, ConnectorConfiguration::fromXContent),
+            Connector.CONFIGURATION_FIELD
         );
     }

@@ -378,8 +388,16 @@ public static ConnectorSyncJob fromXContent(XContentParser parser) throws IOExce
         return PARSER.parse(parser, null);
     }

-    public static Connector syncJobConnectorFromXContent(XContentParser parser) throws IOException {
-        return SYNC_JOB_CONNECTOR_PARSER.parse(parser, null);
+    public static Connector syncJobConnectorFromXContentBytes(BytesReference source, String connectorId, XContentType xContentType) {
+        try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, source, xContentType)) {
+            return ConnectorSyncJob.syncJobConnectorFromXContent(parser, connectorId);
+        } catch (IOException e) {
+            throw new ElasticsearchParseException("Failed to parse a connector document.", e);
+        }
+    }
+
+    public static Connector syncJobConnectorFromXContent(XContentParser parser, String connectorId) throws IOException {
+        return SYNC_JOB_CONNECTOR_PARSER.parse(parser, connectorId);
     }

     public String getId() {
@@ -470,7 +488,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         builder.startObject(CONNECTOR_FIELD.getPreferredName());
         {
-            builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId());
+            if (connector.getConnectorId() != null) {
+                builder.field(Connector.ID_FIELD.getPreferredName(), connector.getConnectorId());
+            }
             if (connector.getFiltering() != null) {
                 builder.field(Connector.FILTERING_FIELD.getPreferredName(), connector.getFiltering());
             }
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java index a7d20414d4631..ee35d8fb6372c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobIndexService.java @@ -38,10 +38,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.application.connector.Connector; -import org.elasticsearch.xpack.application.connector.ConnectorConfiguration; -import org.elasticsearch.xpack.application.connector.ConnectorFiltering; import org.elasticsearch.xpack.application.connector.ConnectorIndexService; -import org.elasticsearch.xpack.application.connector.ConnectorIngestPipeline; import org.elasticsearch.xpack.application.connector.ConnectorSyncStatus; import org.elasticsearch.xpack.application.connector.ConnectorTemplateRegistry; import org.elasticsearch.xpack.application.connector.syncjob.action.PostConnectorSyncJobAction; @@ -429,22 +426,16 @@ public void onResponse(GetResponse response) { onFailure(new ResourceNotFoundException("Connector with id '" + connectorId + "' does not exist.")); return; } - - Map source = response.getSource(); - - @SuppressWarnings("unchecked") - final Connector syncJobConnectorInfo = new Connector.Builder().setConnectorId(connectorId) - .setFiltering((List) source.get(Connector.FILTERING_FIELD.getPreferredName())) - .setIndexName((String) source.get(Connector.INDEX_NAME_FIELD.getPreferredName())) - .setLanguage((String) source.get(Connector.LANGUAGE_FIELD.getPreferredName())) - .setPipeline((ConnectorIngestPipeline) source.get(Connector.PIPELINE_FIELD.getPreferredName())) - .setServiceType((String) source.get(Connector.SERVICE_TYPE_FIELD.getPreferredName())) - .setConfiguration( - (Map) source.get(Connector.CONFIGURATION_FIELD.getPreferredName()) - ) - .build(); - - listener.onResponse(syncJobConnectorInfo); + try { + final Connector syncJobConnectorInfo = ConnectorSyncJob.syncJobConnectorFromXContentBytes( + response.getSourceAsBytesRef(), + connectorId, + XContentType.JSON + ); + listener.onResponse(syncJobConnectorInfo); + } catch (Exception e) { + listener.onFailure(e); + } } @Override diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java index 7179bbb3a62f2..111828680455c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CancelConnectorSyncJobAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,6 +18,7 @@ import 
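Worth noting about the ConnectorSyncJobIndexService hunk above: the removed block cast entries of response.getSource() straight to domain types, but nested objects in a GET source come back as plain maps, so a non-null pipeline or filtering entry would have failed those casts at runtime. Re-parsing the source bytes through the shared parser builds the nested types correctly and threads the explicit connector id through the new context parameter:

    // The single code path after this change; connectorId overrides any id in the source.
    Connector connector = ConnectorSyncJob.syncJobConnectorFromXContentBytes(
        response.getSourceAsBytesRef(),
        connectorId,
        XContentType.JSON
    );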
org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import java.io.IOException; import java.util.Objects; @@ -27,13 +27,13 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; -public class CancelConnectorSyncJobAction extends ActionType { +public class CancelConnectorSyncJobAction extends ActionType { public static final CancelConnectorSyncJobAction INSTANCE = new CancelConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/cancel"; private CancelConnectorSyncJobAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME, ConnectorUpdateActionResponse::new); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java index 3e5e1578cd54d..54ba26ec1533a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/CheckInConnectorSyncJobAction.java @@ -10,7 +10,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -19,6 +18,7 @@ import org.elasticsearch.xcontent.ToXContentObject; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; import java.io.IOException; @@ -27,13 +27,13 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class CheckInConnectorSyncJobAction extends ActionType { +public class CheckInConnectorSyncJobAction extends ActionType { public static final CheckInConnectorSyncJobAction INSTANCE = new CheckInConnectorSyncJobAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/check_in"; private CheckInConnectorSyncJobAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME, ConnectorUpdateActionResponse::new); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java index 82d679c6f0ad0..7cfce07aca48d 100644 --- 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCancelConnectorSyncJobAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import java.io.IOException; import java.util.List; @@ -42,6 +43,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient CancelConnectorSyncJobAction.Request request = new CancelConnectorSyncJobAction.Request( restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM) ); - return restChannel -> client.execute(CancelConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + return restChannel -> client.execute( + CancelConnectorSyncJobAction.INSTANCE, + request, + new RestToXContentListener<>(restChannel, ConnectorUpdateActionResponse::status) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java index 86f97f4c5fdb4..882227e45169a 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestCheckInConnectorSyncJobAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import java.io.IOException; import java.util.List; @@ -41,6 +42,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient restRequest.param(CONNECTOR_SYNC_JOB_ID_PARAM) ); - return restChannel -> client.execute(CheckInConnectorSyncJobAction.INSTANCE, request, new RestToXContentListener<>(restChannel)); + return restChannel -> client.execute( + CheckInConnectorSyncJobAction.INSTANCE, + request, + new RestToXContentListener<>(restChannel, ConnectorUpdateActionResponse::status) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java index e19a9675beebb..a05be4a92e6e3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobErrorAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import java.io.IOException; import java.util.List; @@ 
-46,7 +47,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient return restChannel -> client.execute( UpdateConnectorSyncJobErrorAction.INSTANCE, request, - new RestToXContentListener<>(restChannel) + new RestToXContentListener<>(restChannel, ConnectorUpdateActionResponse::status) ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java index aedd1605b8bfb..57a362b55ee9b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/RestUpdateConnectorSyncJobIngestionStatsAction.java @@ -10,9 +10,9 @@ import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.application.EnterpriseSearch; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import java.io.IOException; import java.util.List; @@ -46,7 +46,7 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient return channel -> client.execute( UpdateConnectorSyncJobIngestionStatsAction.INSTANCE, request, - new RestToXContentListener<>(channel, r -> RestStatus.OK) + new RestToXContentListener<>(channel, ConnectorUpdateActionResponse::status) ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java index ac61dcdf08a61..b0d4628e06202 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCancelConnectorSyncJobAction.java @@ -10,18 +10,18 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; public class TransportCancelConnectorSyncJobAction extends HandledTransportAction< CancelConnectorSyncJobAction.Request, - AcknowledgedResponse> { + ConnectorUpdateActionResponse> { protected ConnectorSyncJobIndexService connectorSyncJobIndexService; @@ -43,7 +43,14 @@ public TransportCancelConnectorSyncJobAction( } @Override - protected void 
doExecute(Task task, CancelConnectorSyncJobAction.Request request, ActionListener listener) { - connectorSyncJobIndexService.cancelConnectorSyncJob(request.getConnectorSyncJobId(), listener.map(r -> AcknowledgedResponse.TRUE)); + protected void doExecute( + Task task, + CancelConnectorSyncJobAction.Request request, + ActionListener listener + ) { + connectorSyncJobIndexService.cancelConnectorSyncJob( + request.getConnectorSyncJobId(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java index ebaadc80f4c27..9b57b090c3bd7 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportCheckInConnectorSyncJobAction.java @@ -10,18 +10,18 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; public class TransportCheckInConnectorSyncJobAction extends HandledTransportAction< CheckInConnectorSyncJobAction.Request, - AcknowledgedResponse> { + ConnectorUpdateActionResponse> { protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; @@ -43,7 +43,14 @@ public TransportCheckInConnectorSyncJobAction( } @Override - protected void doExecute(Task task, CheckInConnectorSyncJobAction.Request request, ActionListener listener) { - connectorSyncJobIndexService.checkInConnectorSyncJob(request.getConnectorSyncJobId(), listener.map(r -> AcknowledgedResponse.TRUE)); + protected void doExecute( + Task task, + CheckInConnectorSyncJobAction.Request request, + ActionListener listener + ) { + connectorSyncJobIndexService.checkInConnectorSyncJob( + request.getConnectorSyncJobId(), + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java index c814092f2e7a2..d70d5cdd956fa 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobErrorAction.java @@ -10,18 +10,18 @@ import org.elasticsearch.action.ActionListener; import 
org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; public class TransportUpdateConnectorSyncJobErrorAction extends HandledTransportAction< UpdateConnectorSyncJobErrorAction.Request, - AcknowledgedResponse> { + ConnectorUpdateActionResponse> { protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; @@ -43,11 +43,15 @@ public TransportUpdateConnectorSyncJobErrorAction( } @Override - protected void doExecute(Task task, UpdateConnectorSyncJobErrorAction.Request request, ActionListener listener) { + protected void doExecute( + Task task, + UpdateConnectorSyncJobErrorAction.Request request, + ActionListener listener + ) { connectorSyncJobIndexService.updateConnectorSyncJobError( request.getConnectorSyncJobId(), request.getError(), - listener.map(r -> AcknowledgedResponse.TRUE) + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java index 864da6ca3095b..49c722b84e63b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/TransportUpdateConnectorSyncJobIngestionStatsAction.java @@ -10,18 +10,18 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobIndexService; public class TransportUpdateConnectorSyncJobIngestionStatsAction extends HandledTransportAction< UpdateConnectorSyncJobIngestionStatsAction.Request, - AcknowledgedResponse> { + ConnectorUpdateActionResponse> { protected final ConnectorSyncJobIndexService connectorSyncJobIndexService; @@ -46,8 +46,11 @@ public TransportUpdateConnectorSyncJobIngestionStatsAction( protected void doExecute( Task task, UpdateConnectorSyncJobIngestionStatsAction.Request request, - ActionListener listener + ActionListener listener ) { - connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats(request, listener.map(r -> 
AcknowledgedResponse.TRUE)); + connectorSyncJobIndexService.updateConnectorSyncJobIngestionStats( + request, + listener.map(r -> new ConnectorUpdateActionResponse(r.getResult())) + ); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java index 820630bccee03..fe0893c82e27d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobErrorAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ -23,6 +22,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants; @@ -32,14 +32,14 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -public class UpdateConnectorSyncJobErrorAction extends ActionType { +public class UpdateConnectorSyncJobErrorAction extends ActionType { public static final UpdateConnectorSyncJobErrorAction INSTANCE = new UpdateConnectorSyncJobErrorAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_error"; public static final String ERROR_EMPTY_MESSAGE = "[error] of the connector sync job cannot be null or empty"; private UpdateConnectorSyncJobErrorAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME, ConnectorUpdateActionResponse::new); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java index 34d8be2af4881..b9c57cb6a0c61 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/connector/syncjob/action/UpdateConnectorSyncJobIngestionStatsAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionType; -import org.elasticsearch.action.support.master.AcknowledgedResponse; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.StreamInput; @@ 
-25,6 +24,7 @@ import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.application.connector.action.ConnectorUpdateActionResponse; import org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJob; import java.io.IOException; @@ -36,13 +36,13 @@ import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; import static org.elasticsearch.xpack.application.connector.syncjob.ConnectorSyncJobConstants.EMPTY_CONNECTOR_SYNC_JOB_ID_ERROR_MESSAGE; -public class UpdateConnectorSyncJobIngestionStatsAction extends ActionType { +public class UpdateConnectorSyncJobIngestionStatsAction extends ActionType { public static final UpdateConnectorSyncJobIngestionStatsAction INSTANCE = new UpdateConnectorSyncJobIngestionStatsAction(); public static final String NAME = "cluster:admin/xpack/connector/sync_job/update_stats"; public UpdateConnectorSyncJobIngestionStatsAction() { - super(NAME, AcknowledgedResponse::readFrom); + super(NAME, ConnectorUpdateActionResponse::new); } public static class Request extends ActionRequest implements ToXContentObject { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java index 39a2b1c6ab6d2..ef42a7d7c64f2 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRuleCriteria.java @@ -37,7 +37,7 @@ public class QueryRuleCriteria implements Writeable, ToXContentObject { - public static final TransportVersion CRITERIA_METADATA_VALUES_TRANSPORT_VERSION = TransportVersions.V_8_500_046; + public static final TransportVersion CRITERIA_METADATA_VALUES_TRANSPORT_VERSION = TransportVersions.V_8_500_061; private final QueryRuleCriteriaType criteriaType; private final String criteriaMetadata; private final List criteriaValues; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java index 0a1ff919493c3..fcd0f6be8fbcb 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/QueryRulesetListItem.java @@ -27,8 +27,7 @@ */ public class QueryRulesetListItem implements Writeable, ToXContentObject { - // TODO we need to actually bump transport version, but there's no point until main is merged. Placeholder for now. 
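The sync-job actions above (cancel, check-in, update-error, update-stats) all migrate the same way: AcknowledgedResponse can only carry a boolean, while the index service reports a DocWriteResponse.Result, so the actions now return the shared ConnectorUpdateActionResponse and every REST handler derives its HTTP status through ConnectorUpdateActionResponse::status. That is the mapping shown near the top of this section:

    public RestStatus status() {
        return switch (result) {
            case NOT_FOUND -> RestStatus.NOT_FOUND;
            default -> RestStatus.OK;
        };
    }

The practical effect: an update against a missing sync job can now surface as 404 instead of an unconditional acknowledgement.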
- public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_500_052; + public static final TransportVersion EXPANDED_RULESET_COUNT_TRANSPORT_VERSION = TransportVersions.V_8_500_061; public static final ParseField RULESET_ID_FIELD = new ParseField("ruleset_id"); public static final ParseField RULE_TOTAL_COUNT_FIELD = new ParseField("rule_total_count"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java index ebd78119ab7d5..78cde38ec8c4d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/rules/RuleQueryBuilder.java @@ -73,7 +73,7 @@ public class RuleQueryBuilder extends AbstractQueryBuilder { @Override public TransportVersion getMinimalSupportedVersion() { - return TransportVersions.V_8_500_040; + return TransportVersions.V_8_500_061; } public RuleQueryBuilder(QueryBuilder organicQuery, Map matchCriteria, String rulesetId) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java index a4ce64181c48e..97f30d2ca8722 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/search/action/RestRenderSearchApplicationQueryAction.java @@ -42,7 +42,9 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest restRequest, NodeC final String searchAppName = restRequest.param("name"); SearchApplicationSearchRequest request; if (restRequest.hasContent()) { - request = SearchApplicationSearchRequest.fromXContent(searchAppName, restRequest.contentParser()); + try (var parser = restRequest.contentParser()) { + request = SearchApplicationSearchRequest.fromXContent(searchAppName, parser); + } } else { request = new SearchApplicationSearchRequest(searchAppName); } diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java index 9401a2a58403e..cdfa3dea8a6fa 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/ConnectorTests.java @@ -98,6 +98,31 @@ public void testToXContent() throws IOException { } ], "value":"" + }, + "field_with_null_tooltip":{ + "default_value":null, + "depends_on":[ + { + "field":"some_field", + "value":true + } + ], + "display":"textbox", + "label":"Very important field", + "options":[], + "order":4, + "required":true, + "sensitive":false, + "tooltip":null, + "type":"str", + "ui_restrictions":[], + "validations":[ + { + "constraint":0, + "type":"greater_than" + } + ], + "value":"" } }, "description":"test-connector", diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponseBWCSerializingTests.java 
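The transport-version bumps above (QueryRuleCriteria, QueryRulesetListItem, RuleQueryBuilder) all move to the same constant, V_8_500_061, retiring the placeholder versions flagged by the removed TODO; EqlSearchResponse later in this diff gets the same bump. The constant guards the usual conditional read/write pattern with a fallback for older peers, as in the Event deserialization below:

    // Newer peers send the flag explicitly; older peers fall back to the
    // empty-index convention for missing events.
    if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) {
        missing = in.readBoolean();
    } else {
        missing = index.isEmpty();
    }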
b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponseBWCSerializingTests.java new file mode 100644 index 0000000000000..d70aa892788ce --- /dev/null +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/ConnectorUpdateActionResponseBWCSerializingTests.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.application.connector.action; + +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.DocWriteResponse; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; + +import java.io.IOException; + +public class ConnectorUpdateActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase { + + @Override + protected Writeable.Reader instanceReader() { + return ConnectorUpdateActionResponse::new; + } + + @Override + protected ConnectorUpdateActionResponse createTestInstance() { + return new ConnectorUpdateActionResponse(randomFrom(DocWriteResponse.Result.values())); + } + + @Override + protected ConnectorUpdateActionResponse mutateInstance(ConnectorUpdateActionResponse instance) throws IOException { + return randomValueOtherThan(instance, this::createTestInstance); + } + + @Override + protected ConnectorUpdateActionResponse mutateInstanceForVersion(ConnectorUpdateActionResponse instance, TransportVersion version) { + return instance; + } +} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java deleted file mode 100644 index d4aa4f12b36d3..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorConfigurationActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorConfigurationActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorConfigurationAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorConfigurationAction.Response::new; - } - - @Override - protected UpdateConnectorConfigurationAction.Response createTestInstance() { - return new UpdateConnectorConfigurationAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorConfigurationAction.Response mutateInstance(UpdateConnectorConfigurationAction.Response instance) - throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorConfigurationAction.Response mutateInstanceForVersion( - UpdateConnectorConfigurationAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java deleted file mode 100644 index a39fcac3d2f04..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorErrorActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorErrorActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorErrorAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorErrorAction.Response::new; - } - - @Override - protected UpdateConnectorErrorAction.Response createTestInstance() { - return new UpdateConnectorErrorAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorErrorAction.Response mutateInstance(UpdateConnectorErrorAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorErrorAction.Response mutateInstanceForVersion( - UpdateConnectorErrorAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionResponseBWCSerializingTests.java deleted file mode 100644 index 0f33eeac8dfb5..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorFilteringActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorFilteringActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorFilteringAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorFilteringAction.Response::new; - } - - @Override - protected UpdateConnectorFilteringAction.Response createTestInstance() { - return new UpdateConnectorFilteringAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorFilteringAction.Response mutateInstance(UpdateConnectorFilteringAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorFilteringAction.Response mutateInstanceForVersion( - UpdateConnectorFilteringAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java deleted file mode 100644 index d992f1b5f188e..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSeenActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorLastSeenActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorLastSeenAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorLastSeenAction.Response::new; - } - - @Override - protected UpdateConnectorLastSeenAction.Response createTestInstance() { - return new UpdateConnectorLastSeenAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorLastSeenAction.Response mutateInstance(UpdateConnectorLastSeenAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorLastSeenAction.Response mutateInstanceForVersion( - UpdateConnectorLastSeenAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java deleted file mode 100644 index dd214e10699ef..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorLastSyncStatsActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorLastSyncStatsAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorLastSyncStatsAction.Response::new; - } - - @Override - protected UpdateConnectorLastSyncStatsAction.Response createTestInstance() { - return new UpdateConnectorLastSyncStatsAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorLastSyncStatsAction.Response mutateInstance(UpdateConnectorLastSyncStatsAction.Response instance) - throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorLastSyncStatsAction.Response mutateInstanceForVersion( - UpdateConnectorLastSyncStatsAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java deleted file mode 100644 index 2297ccb565b5e..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorNameActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorNameActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorNameAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorNameAction.Response::new; - } - - @Override - protected UpdateConnectorNameAction.Response createTestInstance() { - return new UpdateConnectorNameAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorNameAction.Response mutateInstance(UpdateConnectorNameAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorNameAction.Response mutateInstanceForVersion( - UpdateConnectorNameAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java deleted file mode 100644 index 065dafcaf00a4..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorPipelineActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,41 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorPipelineActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorPipelineAction.Response> { - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorPipelineAction.Response::new; - } - - @Override - protected UpdateConnectorPipelineAction.Response createTestInstance() { - return new UpdateConnectorPipelineAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorPipelineAction.Response mutateInstance(UpdateConnectorPipelineAction.Response instance) throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorPipelineAction.Response mutateInstanceForVersion( - UpdateConnectorPipelineAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java deleted file mode 100644 index a03713fa61a36..0000000000000 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/action/UpdateConnectorSchedulingActionResponseBWCSerializingTests.java +++ /dev/null @@ -1,43 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.xpack.application.connector.action; - -import org.elasticsearch.TransportVersion; -import org.elasticsearch.action.DocWriteResponse; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.xpack.core.ml.AbstractBWCWireSerializationTestCase; - -import java.io.IOException; - -public class UpdateConnectorSchedulingActionResponseBWCSerializingTests extends AbstractBWCWireSerializationTestCase< - UpdateConnectorSchedulingAction.Response> { - - @Override - protected Writeable.Reader instanceReader() { - return UpdateConnectorSchedulingAction.Response::new; - } - - @Override - protected UpdateConnectorSchedulingAction.Response createTestInstance() { - return new UpdateConnectorSchedulingAction.Response(randomFrom(DocWriteResponse.Result.values())); - } - - @Override - protected UpdateConnectorSchedulingAction.Response mutateInstance(UpdateConnectorSchedulingAction.Response instance) - throws IOException { - return randomValueOtherThan(instance, this::createTestInstance); - } - - @Override - protected UpdateConnectorSchedulingAction.Response mutateInstanceForVersion( - UpdateConnectorSchedulingAction.Response instance, - TransportVersion version - ) { - return instance; - } -} diff --git a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java index 04629b6ee9751..b82db8d04d3a9 100644 --- a/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java +++ b/x-pack/plugin/ent-search/src/test/java/org/elasticsearch/xpack/application/connector/syncjob/ConnectorSyncJobTests.java @@ -332,6 +332,151 @@ public void testFromXContent_WithAllNullableFieldsSetToNull_DoesNotThrow() throw ConnectorSyncJob.fromXContentBytes(new BytesArray(content), XContentType.JSON); } + public void testSyncJobConnectorFromXContent_WithAllFieldsSet() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": "search-connector", + "language": "english", + "pipeline": { + "extract_binary_content": true, + "name": "ent-search-generic-ingestion", + "reduce_whitespace": true, + "run_ml_inference": false + }, + "service_type": "service type", + "configuration": {} + } + """); + + Connector connector = ConnectorSyncJob.syncJobConnectorFromXContentBytes(new BytesArray(content), null, XContentType.JSON); + + assertThat(connector.getConnectorId(), 
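The two new tests around this point exercise syncJobConnectorFromXContent with a null context id, once with a fully populated connector document and once with every optional field set to JSON null. The caller-supplied-id path is covered indirectly through the index service; a natural direct spot check (hypothetical, not added by this change) would be:

    // An explicit id should take precedence over the document's own "id" field.
    Connector c = ConnectorSyncJob.syncJobConnectorFromXContentBytes(
        new BytesArray(content), "explicit-id", XContentType.JSON
    );
    assertThat(c.getConnectorId(), equalTo("explicit-id"));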
equalTo("connector-id")); + assertThat(connector.getFiltering().size(), equalTo(1)); + assertThat(connector.getIndexName(), equalTo("search-connector")); + assertThat(connector.getLanguage(), equalTo("english")); + assertThat(connector.getPipeline(), notNullValue()); + assertThat(connector.getServiceType(), equalTo("service type")); + assertThat(connector.getConfiguration(), notNullValue()); + } + + public void testSyncJobConnectorFromXContent_WithAllNonOptionalFieldsSet_DoesNotThrow() throws IOException { + String content = XContentHelper.stripWhitespace(""" + { + "id": "connector-id", + "filtering": [ + { + "active": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + }, + "domain": "DEFAULT", + "draft": { + "advanced_snippet": { + "created_at": "2023-12-01T14:18:37.397819Z", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": {} + }, + "rules": [ + { + "created_at": "2023-12-01T14:18:37.397819Z", + "field": "_", + "id": "DEFAULT", + "order": 0, + "policy": "include", + "rule": "regex", + "updated_at": "2023-12-01T14:18:37.397819Z", + "value": ".*" + } + ], + "validation": { + "errors": [], + "state": "valid" + } + } + } + ], + "index_name": null, + "language": null, + "pipeline": null, + "service_type": null, + "configuration": {} + } + """); + + ConnectorSyncJob.syncJobConnectorFromXContentBytes(new BytesArray(content), null, XContentType.JSON); + } + private void assertTransportSerialization(ConnectorSyncJob testInstance) throws IOException { ConnectorSyncJob deserializedInstance = copyInstance(testInstance); assertNotSame(testInstance, deserializedInstance); diff --git a/x-pack/plugin/eql/qa/common/src/main/resources/test_missing_events.toml b/x-pack/plugin/eql/qa/common/src/main/resources/test_missing_events.toml index bfe5465adebcf..0d546940c72a1 100644 --- a/x-pack/plugin/eql/qa/common/src/main/resources/test_missing_events.toml +++ b/x-pack/plugin/eql/qa/common/src/main/resources/test_missing_events.toml @@ -385,3 +385,16 @@ join_keys = ["foo", "foo", "foo", "foo", "baz", "baz"] +[[queries]] +name = "interleaved_3_missing" +query = ''' + sequence with maxspan=1h + ![ test1 where tag == "foobar" ] + [ test1 where tag == "normal" ] + ![ test1 where tag == "foobar" ] + [ test1 where tag == "normal" ] + ![ test1 where tag == "foobar" ] +''' +expected_event_ids = [-1, 1, -1, 2, -1, + -1, 2, -1, 4, -1] + diff --git a/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/50_samples.yml b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/50_samples.yml new file mode 100644 index 0000000000000..0c413e809689a --- /dev/null +++ b/x-pack/plugin/eql/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/eql/50_samples.yml @@ -0,0 +1,80 @@ +--- +setup: + - do: + indices.create: + index: sample1 + body: + mappings: + properties: + ip: + type: ip + version: + type: version + missing_keyword: + type: keyword + type_test: + type: keyword + "@timestamp_pretty": + type: date + format: dd-MM-yyyy + event_type: + type: keyword + event: + properties: + category: + type: alias + path: event_type + host: + type: keyword + os: + type: keyword + bool: + type: boolean + uptime: + type: 
long + port: + type: long + - do: + bulk: + refresh: true + body: + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"12-12-2022","type_test":"abc","event_type":"alert","os":"win10","port":1234,"missing_keyword":"test","ip":"10.0.0.1","host":"doom","id":11,"version":"1.0.0","uptime":0}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"13-12-2022","event_type":"alert","type_test":"abc","os":"win10","port":1,"host":"CS","id":12,"version":"1.2.0","uptime":5}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"12-12-2022","event_type":"alert","type_test":"abc","bool":false,"os":"win10","port":1234,"host":"farcry","id":13,"version":"2.0.0","uptime":1}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"13-12-2022","event_type":"alert","type_test":"abc","os":"slack","port":12,"host":"GTA","id":14,"version":"10.0.0","uptime":3}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"17-12-2022","event_type":"alert","os":"fedora","port":1234,"host":"sniper 3d","id":15,"version":"20.1.0","uptime":6}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"17-12-2022","event_type":"alert","bool":true,"os":"redhat","port":65123,"host":"doom","id":16,"version":"20.10.0"}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"17-12-2022","event_type":"failure","bool":true,"os":"redhat","port":1234,"missing_keyword":"yyy","host":"doom","id":17,"version":"20.2.0","uptime":15}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"12-12-2022","event_type":"success","os":"win10","port":512,"missing_keyword":"test","host":"doom","id":18,"version":"1.2.3","uptime":16}' + - '{"index" : { "_index" : "sample1" }}' + - '{"@timestamp_pretty":"15-12-2022","event_type":"success","bool":true,"os":"win10","port":12,"missing_keyword":"test","host":"GTA","id":19,"version":"1.2.3"}' + - '{"index" : { "_index" : "sample1" }}' + - '{"event_type":"alert","bool":true,"os":"win10","port":1234,"missing_keyword":null,"ip":"10.0.0.5","host":"farcry","id":110,"version":"1.2.3","uptime":1}' + +--- +# Test an empty reply due to query filtering +"Execute some EQL.": + - do: + eql.search: + index: sample1 + body: + query: 'sample by host [any where uptime > 0] by os [any where port > 100] by os [any where bool == true] by os' + filter: + range: + "@timestamp_pretty": + gte: now-5m + lte: now + + - match: {timed_out: false} + - match: {hits.total.value: 0} + - match: {hits.total.relation: "eq"} + - match: {hits.sequences: []} + diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java index c22cf7d390628..3be9e23c38b46 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/action/EqlSearchResponse.java @@ -281,7 +281,7 @@ private Event(StreamInput in) throws IOException { } else { fetchFields = null; } - if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if (in.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { missing = in.readBoolean(); } else { missing = index.isEmpty(); @@ -304,7 +304,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeMap(fetchFields, StreamOutput::writeWriteable); } } - if (out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_040)) { + if 
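A detail that makes the new 50_samples.yml test above meaningful: every indexed document carries a December 2022 @timestamp_pretty, while the request filters on now-5m to now, so the filter matches nothing and the sample search must come back empty rather than failing. That is exactly the case the SampleIterator guard below handles by returning an empty payload when the response carries no aggregations at all, and the PITAwareQueryClient change pushes the same filter into the point-in-time request so filtered-out data is never consulted.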
(out.getTransportVersion().onOrAfter(TransportVersions.V_8_500_061)) { // for BWC, 8.9.1+ does not have "missing" attribute, but it considers events with an empty index "" as missing events // see https://github.com/elastic/elasticsearch/pull/98130 out.writeBoolean(missing); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java index a96102dad6cfb..f4b933300dcd7 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sample/SampleIterator.java @@ -147,6 +147,12 @@ private void advance(ActionListener listener) { private void queryForCompositeAggPage(ActionListener listener, final SampleQueryRequest request) { client.query(request, listener.delegateFailureAndWrap((delegate, r) -> { + // either the fields values or the fields themselves are missing + // or the filter applied on the eql query matches no documents + if (r.hasAggregations() == false) { + payload(delegate); + return; + } Aggregation a = r.getAggregations().get(COMPOSITE_AGG_NAME); if (a instanceof InternalComposite == false) { throw new EqlIllegalArgumentException("Unexpected aggregation result type returned [{}]", a.getClass()); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java index 707964a93ab9e..befb2c7503515 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClient.java @@ -46,10 +46,12 @@ public class PITAwareQueryClient extends BasicQueryClient { private String pitId; private final TimeValue keepAlive; + private final QueryBuilder filter; public PITAwareQueryClient(EqlSession eqlSession) { super(eqlSession); this.keepAlive = eqlSession.configuration().requestTimeout(); + this.filter = eqlSession.configuration().filter(); } @Override @@ -98,6 +100,7 @@ private void searchWithPIT(MultiSearchRequest search, ActionListener ActionListener pitListener(Function void openPIT(ActionListener listener, Runnable runnable) { OpenPointInTimeRequest request = new OpenPointInTimeRequest(indices).indicesOptions(IndexResolver.FIELD_CAPS_INDICES_OPTIONS) .keepAlive(keepAlive); + request.indexFilter(filter); client.execute(TransportOpenPointInTimeAction.TYPE, request, listener.delegateFailureAndWrap((l, r) -> { pitId = r.getPointInTimeId(); runnable.run(); diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/Sequence.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/Sequence.java index 813c1fd1a2aae..fb925f3245faa 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/Sequence.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/Sequence.java @@ -32,14 +32,17 @@ public class Sequence implements Comparable, Accountable { private final SequenceKey key; private final Match[] matches; + private int firstStage; private int currentStage = 0; @SuppressWarnings({ "rawtypes", "unchecked" }) - public Sequence(SequenceKey key, int stages, Ordinal ordinal, HitReference firstHit) { + public 
Sequence(SequenceKey key, int stages, int firstStage, Ordinal ordinal, HitReference firstHit) { Check.isTrue(stages >= 2, "A sequence requires at least 2 criteria, given [{}]", stages); this.key = key; this.matches = new Match[stages]; - this.matches[0] = new Match(ordinal, firstHit); + this.matches[firstStage] = new Match(ordinal, firstHit); + this.firstStage = firstStage; + this.currentStage = firstStage; } public void putMatch(int stage, Ordinal ordinal, HitReference hit) { @@ -56,7 +59,7 @@ public Ordinal ordinal() { } public Ordinal startOrdinal() { - return matches[0].ordinal(); + return matches[firstStage].ordinal(); } public List hits() { diff --git a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java index adb8ee1b43c02..1ad9002f88999 100644 --- a/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java +++ b/x-pack/plugin/eql/src/main/java/org/elasticsearch/xpack/eql/execution/sequence/SequenceMatcher.java @@ -168,7 +168,7 @@ boolean match(int stage, Iterable> hits) { if (isFirstPositiveStage(stage)) { log.trace("Matching hit {} - track sequence", ko.ordinal); - Sequence seq = new Sequence(ko.key, numberOfStages, ko.ordinal, hit); + Sequence seq = new Sequence(ko.key, numberOfStages, stage, ko.ordinal, hit); if (lastPositiveStage == stage) { tryComplete(seq); } else { diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java index 080cc26d81eb2..edbeb3d0a0d8c 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/action/EqlSearchResponseTests.java @@ -289,7 +289,7 @@ private List mutateEvents(List original, TransportVersion version) e.id(), e.source(), version.onOrAfter(TransportVersions.V_7_13_0) ? e.fetchFields() : null, - version.onOrAfter(TransportVersions.V_8_500_040) ? e.missing() : e.index().isEmpty() + version.onOrAfter(TransportVersions.V_8_500_061) ? 
e.missing() : e.index().isEmpty() ) ); } diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java index 85a34d7b6a943..f391e9bdae84b 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/ImplicitTiebreakerTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse.Clusters; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.core.TimeValue; import org.elasticsearch.search.DocValueFormat; @@ -83,8 +82,10 @@ public void query(QueryRequest r, ActionListener l) { ) ); SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); - SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); + ActionListener.respondAndRelease( + l, + new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) + ); } @Override diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java index 336526a1153a5..a7ac6637c2e56 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/assembler/SequenceSpecTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse.Clusters; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.document.DocumentField; @@ -221,8 +220,10 @@ public void query(QueryRequest r, ActionListener l) { new TotalHits(eah.hits.size(), Relation.EQUAL_TO), 0.0f ); - SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); + ActionListener.respondAndRelease( + l, + new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) + ); } @Override diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java index e787505f7dfe3..8c47bfeb8921d 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sample/CircuitBreakerTests.java @@ -18,7 +18,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; 
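
The test hunks in this stretch (ImplicitTiebreakerTests, SequenceSpecTests, and both CircuitBreakerTests) all make the same mechanical change: SearchResponseSections is removed, and its fields are passed straight to the SearchResponse constructor. As a reading aid, here is the same call with each positional argument labelled; the role names are inferred from the removed SearchResponseSections signature, so treat them as assumptions rather than the constructor's real parameter names.

```java
ActionListener.respondAndRelease(
    l,
    new SearchResponse(
        searchHits,     // hits            (formerly SearchResponseSections arg 1)
        null,           // aggregations    (arg 2)
        null,           // suggest         (arg 3)
        false,          // timedOut        (arg 4)
        false,          // terminatedEarly (arg 5)
        null,           // profile results (arg 6)
        0,              // numReducePhases (arg 7)
        null,           // scrollId
        0,              // totalShards
        1,              // successfulShards
        0,              // skippedShards
        0,              // tookInMillis
        null,           // shardFailures
        Clusters.EMPTY  // clusters
    )
);
```
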
import org.elasticsearch.action.search.SearchResponse.Clusters; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; @@ -222,12 +221,16 @@ protected void @SuppressWarnings("unchecked") void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { Aggregations aggs = new Aggregations(List.of(newInternalComposite())); - - SearchResponseSections internal = new SearchResponseSections(null, aggs, null, false, false, null, 0); ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - internal, + null, + aggs, + null, + false, + false, + null, + 0, null, 2, 0, diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java new file mode 100644 index 0000000000000..9c9bbfcdc5127 --- /dev/null +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/search/PITAwareQueryClientTests.java @@ -0,0 +1,289 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.eql.execution.search; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.search.ClosePointInTimeRequest; +import org.elasticsearch.action.search.ClosePointInTimeResponse; +import org.elasticsearch.action.search.OpenPointInTimeRequest; +import org.elasticsearch.action.search.OpenPointInTimeResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.RangeQueryBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchSortValues; +import org.elasticsearch.search.builder.SearchSourceBuilder; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.client.NoOpClient; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.async.AsyncExecutionId; +import org.elasticsearch.xpack.eql.action.EqlSearchAction; +import org.elasticsearch.xpack.eql.action.EqlSearchTask; +import org.elasticsearch.xpack.eql.analysis.PostAnalyzer; +import org.elasticsearch.xpack.eql.analysis.PreAnalyzer; +import org.elasticsearch.xpack.eql.analysis.Verifier; +import org.elasticsearch.xpack.eql.execution.assembler.BoxedQueryRequest; +import 
org.elasticsearch.xpack.eql.execution.assembler.SequenceCriterion; +import org.elasticsearch.xpack.eql.execution.search.extractor.ImplicitTiebreakerHitExtractor; +import org.elasticsearch.xpack.eql.execution.sequence.SequenceMatcher; +import org.elasticsearch.xpack.eql.execution.sequence.TumblingWindow; +import org.elasticsearch.xpack.eql.expression.function.EqlFunctionRegistry; +import org.elasticsearch.xpack.eql.optimizer.Optimizer; +import org.elasticsearch.xpack.eql.planner.Planner; +import org.elasticsearch.xpack.eql.session.EqlConfiguration; +import org.elasticsearch.xpack.eql.session.EqlSession; +import org.elasticsearch.xpack.eql.stats.Metrics; +import org.elasticsearch.xpack.ql.execution.search.extractor.HitExtractor; +import org.elasticsearch.xpack.ql.index.IndexResolver; +import org.elasticsearch.xpack.ql.type.DefaultDataTypeRegistry; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; +import static java.util.Collections.emptySet; +import static org.elasticsearch.action.ActionListener.wrap; +import static org.elasticsearch.index.query.QueryBuilders.idsQuery; +import static org.elasticsearch.index.query.QueryBuilders.matchAllQuery; +import static org.elasticsearch.index.query.QueryBuilders.rangeQuery; +import static org.elasticsearch.index.query.QueryBuilders.termQuery; +import static org.elasticsearch.index.query.QueryBuilders.termsQuery; +import static org.elasticsearch.xpack.eql.EqlTestUtils.booleanArrayOf; + +public class PITAwareQueryClientTests extends ESTestCase { + + private final List keyExtractors = emptyList(); + private static final QueryBuilder[] FILTERS = new QueryBuilder[] { + rangeQuery("some_timestamp_field").gte("2023-12-07"), + termQuery("tier", "hot"), + idsQuery().addIds("1", "2", "3") }; + private static final String[] INDICES = new String[] { "test1", "test2", "test3" }; + + public void testQueryFilterUsedInPitAndSearches() { + try (var threadPool = createThreadPool()) { + final var filter = frequently() ? 
randomFrom(FILTERS) : null; + int stages = randomIntBetween(2, 5); + final var esClient = new ESMockClient(threadPool, filter, stages); + + EqlConfiguration eqlConfiguration = new EqlConfiguration( + INDICES, + org.elasticsearch.xpack.ql.util.DateUtils.UTC, + "nobody", + "cluster", + filter, + emptyMap(), + null, + TimeValue.timeValueSeconds(30), + null, + 123, + 1, + "", + new TaskId("test", 123), + new EqlSearchTask( + randomLong(), + "transport", + EqlSearchAction.NAME, + "", + null, + emptyMap(), + emptyMap(), + new AsyncExecutionId("", new TaskId(randomAlphaOfLength(10), 1)), + TimeValue.timeValueDays(5) + ) + ); + IndexResolver indexResolver = new IndexResolver(esClient, "cluster", DefaultDataTypeRegistry.INSTANCE, () -> emptySet()); + CircuitBreaker cb = new NoopCircuitBreaker("testcb"); + EqlSession eqlSession = new EqlSession( + esClient, + eqlConfiguration, + indexResolver, + new PreAnalyzer(), + new PostAnalyzer(), + new EqlFunctionRegistry(), + new Verifier(new Metrics()), + new Optimizer(), + new Planner(), + cb + ); + QueryClient eqlClient = new PITAwareQueryClient(eqlSession) { + @Override + public void fetchHits(Iterable> refs, ActionListener>> listener) { + List> searchHits = new ArrayList<>(); + for (List ref : refs) { + List hits = new ArrayList<>(ref.size()); + for (HitReference hitRef : ref) { + hits.add(new SearchHit(-1, hitRef.id())); + } + searchHits.add(hits); + } + listener.onResponse(searchHits); + } + }; + + List criteria = new ArrayList<>(stages); + for (int i = 0; i < stages; i++) { + final int j = i; + criteria.add( + new SequenceCriterion( + i, + new BoxedQueryRequest( + () -> SearchSourceBuilder.searchSource().size(10).query(matchAllQuery()).terminateAfter(j), + "@timestamp", + emptyList(), + emptySet() + ), + keyExtractors, + TimestampExtractor.INSTANCE, + null, + ImplicitTiebreakerHitExtractor.INSTANCE, + false, + false + ) + ); + } + + SequenceMatcher matcher = new SequenceMatcher(stages, false, TimeValue.MINUS_ONE, null, booleanArrayOf(stages, false), cb); + TumblingWindow window = new TumblingWindow(eqlClient, criteria, null, matcher, Collections.emptyList()); + window.execute(wrap(response -> { + // do nothing, we don't care about the query results + }, ex -> { fail("Shouldn't have failed"); })); + } + } + + /** + * This class is used by {@code PITFailureTests.testPitCloseOnFailure} method + * to test that PIT close is never (wrongly) invoked if PIT open failed. 
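
The mock client below enforces a strict point-in-time lifecycle while also checking that the EQL filter and indices are propagated into the OpenPointInTimeRequest. A plain-Java sketch of the lifecycle half of those assertions, with hypothetical names:

```java
// Minimal model of the state machine the mock asserts: open exactly once,
// search only while open, close only what was opened.
final class PitLifecycleGuard {
    private boolean open = false;

    void onOpen() {
        if (open) throw new IllegalStateException("PIT opened twice");
        open = true;
    }

    void onSearch() {
        if (!open) throw new IllegalStateException("search before PIT was opened");
    }

    void onClose() {
        if (!open) throw new IllegalStateException("close without a matching open");
        open = false;
    }
}
```
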
+ */ + private class ESMockClient extends NoOpClient { + private final QueryBuilder filter; + private final String pitId = "test_pit_id"; + private boolean openedPIT = false; + private int searchRequestsRemainingCount; + + ESMockClient(ThreadPool threadPool, QueryBuilder filter, int stages) { + super(threadPool); + this.filter = filter; + this.searchRequestsRemainingCount = stages; + } + + @SuppressWarnings("unchecked") + @Override + protected void doExecute( + ActionType action, + Request request, + ActionListener listener + ) { + if (request instanceof OpenPointInTimeRequest openPIT) { + assertFalse(openedPIT); + assertEquals(filter, openPIT.indexFilter()); // check that the filter passed on to the eql query is used in opening the pit + assertArrayEquals(INDICES, openPIT.indices()); // indices for opening pit should be the same as for the eql query itself + + openedPIT = true; + OpenPointInTimeResponse response = new OpenPointInTimeResponse(pitId); + listener.onResponse((Response) response); + } else if (request instanceof ClosePointInTimeRequest closePIT) { + assertTrue(openedPIT); + assertEquals(pitId, closePIT.getId()); + + openedPIT = false; + ClosePointInTimeResponse response = new ClosePointInTimeResponse(true, 1); + listener.onResponse((Response) response); + } else if (request instanceof SearchRequest searchRequest) { + assertTrue(openedPIT); + searchRequestsRemainingCount--; + assertTrue(searchRequestsRemainingCount >= 0); + + assertEquals(pitId, searchRequest.source().pointInTimeBuilder().getEncodedId()); + assertEquals(0, searchRequest.indices().length); // no indices set in the search request + assertEquals(1, searchRequest.source().subSearches().size()); + + BoolQueryBuilder actualQuery = (BoolQueryBuilder) searchRequest.source().subSearches().get(0).getQueryBuilder(); + assertEquals(3, actualQuery.filter().size()); + assertTrue(actualQuery.filter().get(0) instanceof MatchAllQueryBuilder); // the match_all we used when building the criteria + assertTrue(actualQuery.filter().get(1) instanceof RangeQueryBuilder); + QueryBuilder expectedQuery = termsQuery("_index", INDICES); // indices should be used as a filter further on + assertEquals(expectedQuery, actualQuery.filter().get(2)); + + handleSearchRequest(listener, searchRequest); + } else { + super.doExecute(action, request, listener); + } + } + + @SuppressWarnings("unchecked") + void handleSearchRequest(ActionListener listener, SearchRequest searchRequest) { + int ordinal = searchRequest.source().terminateAfter(); + SearchHit searchHit = new SearchHit(ordinal, String.valueOf(ordinal)); + searchHit.sortValues( + new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) + ); + + SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 0.0f); + SearchResponse response = new SearchResponse( + searchHits, + null, + null, + false, + false, + null, + 0, + null, + 2, + 0, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY, + searchRequest.pointInTimeBuilder().getEncodedId() + ); + + ActionListener.respondAndRelease(listener, (Response) response); + } + } + + private static class TimestampExtractor implements HitExtractor { + + static final TimestampExtractor INSTANCE = new TimestampExtractor(); + + @Override + public String getWriteableName() { + return null; + } + + @Override + public void writeTo(StreamOutput out) throws IOException {} + + @Override + public String hitName() { + return 
null; + } + + @Override + public Timestamp extract(SearchHit hit) { + return Timestamp.of(String.valueOf(hit.docId())); + } + } +} diff --git a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java index 2fbab70312892..7ef2b95d982fb 100644 --- a/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java +++ b/x-pack/plugin/eql/src/test/java/org/elasticsearch/xpack/eql/execution/sequence/CircuitBreakerTests.java @@ -21,7 +21,6 @@ import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponse.Clusters; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.common.ParsingException; @@ -46,7 +45,6 @@ import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.search.SearchSortValues; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.tasks.TaskId; @@ -114,8 +112,10 @@ public void query(QueryRequest r, ActionListener l) { new SearchSortValues(new Long[] { (long) ordinal, 1L }, new DocValueFormat[] { DocValueFormat.RAW, DocValueFormat.RAW }) ); SearchHits searchHits = new SearchHits(new SearchHit[] { searchHit }, new TotalHits(1, Relation.EQUAL_TO), 0.0f); - SearchResponseSections internal = new SearchResponseSections(searchHits, null, null, false, false, null, 0); - ActionListener.respondAndRelease(l, new SearchResponse(internal, null, 0, 1, 0, 0, null, Clusters.EMPTY)); + ActionListener.respondAndRelease( + l, + new SearchResponse(searchHits, null, null, false, false, null, 0, null, 0, 1, 0, 0, null, Clusters.EMPTY) + ); } @Override @@ -431,9 +431,14 @@ void handleSearchRequest(ActionListener void handleSearchRequest(ActionListener void handleSearchRequest(ActionListener *

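
The generator hunks that follow rework how generated aggregators consume intermediate state: every intermediate block is now null-checked individually, and non-primitive states must supply combineIntermediate. For orientation, a hand-written sketch of what the regenerated addIntermediateInput roughly looks like for a primitive (sum, seen) state; this is an illustration of the template, not actual generator output.

```java
@Override
public void addIntermediateInput(Page page) {
    assert channels.size() == intermediateBlockCount();
    // Each intermediate block gets its own null check, instead of only
    // inspecting the first block as before.
    Block sumUncast = page.getBlock(channels.get(0));
    if (sumUncast.areAllValuesNull()) {
        return;
    }
    LongVector sum = ((LongBlock) sumUncast).asVector();
    assert sum.getPositionCount() == 1;
    Block seenUncast = page.getBlock(channels.get(1));
    if (seenUncast.areAllValuesNull()) {
        return;
    }
    BooleanVector seen = ((BooleanBlock) seenUncast).asVector();
    assert seen.getPositionCount() == 1;
    if (seen.getBoolean(0)) {
        state.longValue(SumLongAggregator.combine(state.longValue(), sum.getLong(0)));
        state.seen(true);
    }
}
```
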
- * The generation code also looks for the optional methods {@code combineStates} + * The generation code also looks for the optional methods {@code combineIntermediate} * and {@code evaluateFinal} which are used to combine intermediate states and * produce the final output. If the first is missing then the generated code will * call the {@code combine} method to combine intermediate states. If the second diff --git a/x-pack/plugin/esql/compute/build.gradle b/x-pack/plugin/esql/compute/build.gradle index 1851e2f449da0..d95a9ffd862f4 100644 --- a/x-pack/plugin/esql/compute/build.gradle +++ b/x-pack/plugin/esql/compute/build.gradle @@ -208,6 +208,28 @@ tasks.named('stringTemplates').configure { it.inputFile = arrayBlockInputFile it.outputFile = "org/elasticsearch/compute/data/BooleanArrayBlock.java" } + // BigArray block implementations + File bigArrayBlockInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st") + template { + it.properties = intProperties + it.inputFile = bigArrayBlockInputFile + it.outputFile = "org/elasticsearch/compute/data/IntBigArrayBlock.java" + } + template { + it.properties = longProperties + it.inputFile = bigArrayBlockInputFile + it.outputFile = "org/elasticsearch/compute/data/LongBigArrayBlock.java" + } + template { + it.properties = doubleProperties + it.inputFile = bigArrayBlockInputFile + it.outputFile = "org/elasticsearch/compute/data/DoubleBigArrayBlock.java" + } + template { + it.properties = booleanProperties + it.inputFile = bigArrayBlockInputFile + it.outputFile = "org/elasticsearch/compute/data/BooleanBigArrayBlock.java" + } // vector blocks File vectorBlockInputFile = new File("${projectDir}/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st") template { diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java index 6acddf6aa5cde..f992a9d8670b4 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/AggregatorImplementer.java @@ -69,7 +69,6 @@ public class AggregatorImplementer { private final ExecutableElement init; private final ExecutableElement combine; private final ExecutableElement combineValueCount; - private final ExecutableElement combineStates; private final ExecutableElement combineIntermediate; private final ExecutableElement evaluateFinal; private final ClassName implementation; @@ -95,7 +94,6 @@ public AggregatorImplementer(Elements elements, TypeElement declarationType, Int return firstParamType.isPrimitive() || firstParamType.toString().equals(stateType.toString()); }); this.combineValueCount = findMethod(declarationType, "combineValueCount"); - this.combineStates = findMethod(declarationType, "combineStates"); this.combineIntermediate = findMethod(declarationType, "combineIntermediate"); this.evaluateFinal = findMethod(declarationType, "evaluateFinal"); @@ -399,34 +397,30 @@ private MethodSpec addIntermediateInput() { builder.addAnnotation(Override.class).addModifiers(Modifier.PUBLIC).addParameter(PAGE, "page"); builder.addStatement("assert channels.size() == intermediateBlockCount()"); builder.addStatement("assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size()"); - builder.addStatement("Block uncastBlock = page.getBlock(channels.get(0))"); - 
builder.beginControlFlow("if (uncastBlock.areAllValuesNull())"); - { - builder.addStatement("return"); - builder.endControlFlow(); - } - int count = 0; - for (var interState : intermediateState) { + for (int i = 0; i < intermediateState.size(); i++) { + var interState = intermediateState.get(i); + ClassName blockType = blockType(interState.elementType()); + builder.addStatement("Block $L = page.getBlock(channels.get($L))", interState.name + "Uncast", i); + builder.beginControlFlow("if ($L.areAllValuesNull())", interState.name + "Uncast"); + { + builder.addStatement("return"); + builder.endControlFlow(); + } builder.addStatement( - "$T " + interState.name() + " = page.<$T>getBlock(channels.get(" + count + ")).asVector()", + "$T $L = (($T) $L).asVector()", vectorType(interState.elementType()), - blockType(interState.elementType()) + interState.name(), + blockType, + interState.name() + "Uncast" ); - count++; + builder.addStatement("assert $L.getPositionCount() == 1", interState.name()); } - final String first = intermediateState.get(0).name(); - builder.addStatement("assert " + first + ".getPositionCount() == 1"); - if (intermediateState.size() > 1) { - builder.addStatement( - "assert " - + intermediateState.stream() - .map(IntermediateStateDesc::name) - .skip(1) - .map(s -> first + ".getPositionCount() == " + s + ".getPositionCount()") - .collect(joining(" && ")) - ); - } - if (hasPrimitiveState()) { + if (combineIntermediate != null) { + if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { + builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); + } + builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); + } else if (hasPrimitiveState()) { assert intermediateState.size() == 2; assert intermediateState.get(1).name().equals("seen"); builder.beginControlFlow("if (seen.getBoolean(0))"); @@ -438,10 +432,7 @@ private MethodSpec addIntermediateInput() { builder.endControlFlow(); } } else { - if (intermediateState.stream().map(IntermediateStateDesc::elementType).anyMatch(n -> n.equals("BYTES_REF"))) { - builder.addStatement("$T scratch = new $T()", BYTES_REF, BYTES_REF); - } - builder.addStatement("$T.combineIntermediate(state, " + intermediateStateRowAccess() + ")", declarationType); + throw new IllegalArgumentException("Don't know how to combine intermediate input. Define combineIntermediate"); } return builder.build(); } @@ -468,7 +459,7 @@ private String primitiveStateMethod() { return "doubleValue"; default: throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + stateType + ". define combineStates." + "don't know how to fetch primitive values from " + stateType + ". define combineIntermediate." 
); } } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java index 677740862cc04..6b218fab7affb 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/ConsumeProcessor.java @@ -40,7 +40,7 @@ public Set getSupportedAnnotationTypes() { "org.elasticsearch.xpack.esql.expression.function.FunctionInfo", "org.elasticsearch.xpack.esql.expression.function.Param", "org.elasticsearch.rest.ServerlessScope", - + "org.elasticsearch.xcontent.ParserConstructor", Fixed.class.getName() ); } diff --git a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java index f651ab2a316aa..6cd72bd643c32 100644 --- a/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java +++ b/x-pack/plugin/esql/compute/gen/src/main/java/org/elasticsearch/compute/gen/Methods.java @@ -194,7 +194,7 @@ static String vectorAccessorName(String elementTypeName) { case "DOUBLE" -> "getDouble"; case "BYTES_REF" -> "getBytesRef"; default -> throw new IllegalArgumentException( - "don't know how to fetch primitive values from " + elementTypeName + ". define combineStates." + "don't know how to fetch primitive values from " + elementTypeName + ". define combineIntermediate." ); }; } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java index f77f1893caa01..a417e8317bb23 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/DoubleArrayState.java @@ -60,7 +60,7 @@ void set(int groupId, double value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (DoubleVector.Builder builder = DoubleVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (DoubleVector.Builder builder = driverContext.blockFactory().newDoubleVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendDouble(values.get(selected.getInt(i))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java index 82578090503ab..78518de2b16fb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/IntArrayState.java @@ -59,7 +59,7 @@ void set(int groupId, int value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (IntVector.Builder builder = IntVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (IntVector.Builder builder = 
driverContext.blockFactory().newIntVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendInt(values.get(selected.getInt(i))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java index f77d22fb1d26a..ff91dc6693740 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/aggregation/LongArrayState.java @@ -66,7 +66,7 @@ void increment(int groupId, long value) { Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try (LongVector.Builder builder = LongVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.Builder builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.appendLong(values.get(selected.getInt(i))); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java index 7c2723163197a..f9b8358faee6b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayBlock.java @@ -8,25 +8,21 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of boolean. + * Block implementation that stores values in a {@link BooleanArrayVector}. * This class is generated. Do not edit it. 
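
The three array-state hunks above (DoubleArrayState, IntArrayState, LongArrayState) swap the growable vector builder for the fixed-size variant, since the number of selected positions is known up front. The pattern, sketched for the long case under those assumptions:

```java
try (LongVector.FixedBuilder builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) {
    for (int i = 0; i < selected.getPositionCount(); i++) {
        // Fixed builders skip per-append growth checks because the final
        // size was declared when the builder was created.
        builder.appendLong(values.get(selected.getInt(i)));
    }
    return builder.build().asBlock();
}
```
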
*/ -public final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock { +final class BooleanArrayBlock extends AbstractArrayBlock implements BooleanBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanArrayBlock.class); - private final boolean[] values; + private final BooleanArrayVector vector; - public BooleanArrayBlock(boolean[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public BooleanArrayBlock( + BooleanArrayBlock( boolean[] values, int positionCount, int[] firstValueIndexes, @@ -35,7 +31,7 @@ public BooleanArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new BooleanArrayVector(values, values.length, blockFactory); } @Override @@ -45,7 +41,7 @@ public BooleanVector asVector() { @Override public boolean getBoolean(int valueIndex) { - return values[valueIndex]; + return vector.getBoolean(valueIndex); } @Override @@ -83,7 +79,7 @@ public BooleanBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -100,14 +96,13 @@ public BooleanBlock expand() { } } - public static long ramBytesEstimated(boolean[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -130,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java index 5aa8724eb0ca2..114d924df467c 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanArrayVector.java @@ -15,27 +15,20 @@ * Vector implementation that stores an array of boolean values. * This class is generated. Do not edit it. 
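
To summarize the ownership contract the BooleanArrayBlock hunk above introduces: the block now delegates storage to an inner vector and accounts to the circuit breaker only for its own overhead, while the vector accounts for the values. The two methods that encode this, as they appear in the hunk, with explanatory comments added:

```java
@Override
public long ramBytesUsed() {
    // Block-only overhead (first-value indexes, null mask) plus whatever the
    // wrapped vector reports for the values themselves.
    return ramBytesUsedOnlyBlock() + vector.ramBytesUsed();
}

@Override
public void closeInternal() {
    // Return only the block's overhead to the breaker; closing the vector
    // returns the bytes it tracks on its own.
    blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true);
    Releasables.closeExpectNoException(vector);
}
```
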
*/ -public final class BooleanArrayVector extends AbstractVector implements BooleanVector { +final class BooleanArrayVector extends AbstractVector implements BooleanVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanArrayVector.class); private final boolean[] values; - private final BooleanBlock block; - - public BooleanArrayVector(boolean[] values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public BooleanArrayVector(boolean[] values, int positionCount, BlockFactory blockFactory) { + BooleanArrayVector(boolean[] values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new BooleanVectorBlock(this); } @Override public BooleanBlock asBlock() { - return block; + return new BooleanVectorBlock(this); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java new file mode 100644 index 0000000000000..17ed741bd59da --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayBlock.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.BitArray; +import org.elasticsearch.core.Releasables; + +import java.util.BitSet; + +/** + * Block implementation that stores values in a {@link BooleanBigArrayVector}. Does not take ownership of the given + * {@link BitArray} and does not adjust circuit breakers to account for it. + * This class is generated. Do not edit it. + */ +public final class BooleanBigArrayBlock extends AbstractArrayBlock implements BooleanBlock { + + private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this + private final BooleanBigArrayVector vector; + + public BooleanBigArrayBlock( + BitArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); + this.vector = new BooleanBigArrayVector(values, (int) values.size(), blockFactory); + } + + @Override + public BooleanVector asVector() { + return null; + } + + @Override + public boolean getBoolean(int valueIndex) { + return vector.getBoolean(valueIndex); + } + + @Override + public BooleanBlock filter(int... 
positions) { + try (var builder = blockFactory().newBooleanBlockBuilder(positions.length)) { + for (int pos : positions) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int valueCount = getValueCount(pos); + int first = getFirstValueIndex(pos); + if (valueCount == 1) { + builder.appendBoolean(getBoolean(getFirstValueIndex(pos))); + } else { + builder.beginPositionEntry(); + for (int c = 0; c < valueCount; c++) { + builder.appendBoolean(getBoolean(first + c)); + } + builder.endPositionEntry(); + } + } + return builder.mvOrdering(mvOrdering()).build(); + } + } + + @Override + public ElementType elementType() { + return ElementType.BOOLEAN; + } + + @Override + public BooleanBlock expand() { + if (firstValueIndexes == null) { + incRef(); + return this; + } + // TODO use reference counting to share the vector + try (var builder = blockFactory().newBooleanBlockBuilder(firstValueIndexes[getPositionCount()])) { + for (int pos = 0; pos < getPositionCount(); pos++) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int first = getFirstValueIndex(pos); + int end = first + getValueCount(pos); + for (int i = first; i < end; i++) { + builder.appendBoolean(getBoolean(i)); + } + } + return builder.mvOrdering(MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING).build(); + } + } + + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof BooleanBlock that) { + return BooleanBlock.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return BooleanBlock.hash(this); + } + + @Override + public String toString() { + return getClass().getSimpleName() + + "[positions=" + + getPositionCount() + + ", mvOrdering=" + + mvOrdering() + + ", ramBytesUsed=" + + vector.ramBytesUsed() + + ']'; + } + + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + + @Override + public void closeInternal() { + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java index 2621ec612944e..9618edb1fa77a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBigArrayVector.java @@ -12,30 +12,24 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed BooleanArray. + * Vector implementation that defers to an enclosed {@link BitArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
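
The big-array variants exist so that very large boolean vectors can be bit-packed instead of held as one huge boolean[]. A self-contained model of the packing (not the real org.elasticsearch.common.util.BitArray):

```java
// Packs one boolean per bit into longs: a billion positions costs roughly
// 128 MB of long[] instead of ~1 GB of boolean[].
final class SimpleBitArray {
    private final long[] words;

    SimpleBitArray(long bits) {
        words = new long[(int) ((bits + 63) >>> 6)];
    }

    void set(long index) {
        words[(int) (index >>> 6)] |= 1L << (index & 63);
    }

    boolean get(long index) {
        return (words[(int) (index >>> 6)] & (1L << (index & 63))) != 0;
    }
}
```
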
*/ public final class BooleanBigArrayVector extends AbstractVector implements BooleanVector, Releasable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BooleanBigArrayVector.class); + private static final long BASE_RAM_BYTES_USED = 0; // FIXME private final BitArray values; - private final BooleanBlock block; - - public BooleanBigArrayVector(BitArray values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - public BooleanBigArrayVector(BitArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new BooleanVectorBlock(this); } @Override public BooleanBlock asBlock() { - return block; + return new BooleanVectorBlock(this); } @Override @@ -71,11 +65,9 @@ public BooleanVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BitArray} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java index 352ee783d8614..222de8c19850b 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlock.java @@ -18,7 +18,7 @@ * Block that stores boolean values. * This class is generated. Do not edit it. */ -public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock { +public sealed interface BooleanBlock extends Block permits BooleanArrayBlock, BooleanVectorBlock, ConstantNullBlock, BooleanBigArrayBlock { /** * Retrieves the boolean value stored at the given value index. @@ -166,16 +166,6 @@ static int hash(BooleanBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#newBooleanBlockBuilder} @@ -185,16 +175,6 @@ static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBooleanBlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static BooleanBlock newConstantBlockWith(boolean value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. 
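
With the non-breaking entry points deleted above, every allocation site must name its BlockFactory explicitly, which keeps all block memory visible to the circuit breaker. The before and after at a typical call site:

```java
// Before (removed): the implicit non-breaking factory could hide allocations.
//   BooleanBlock block = BooleanBlock.newConstantBlockWith(true, positions);
// After: the factory is an explicit dependency, so the allocation is tracked.
BooleanBlock block = blockFactory.newConstantBooleanBlockWith(true, positions);
```
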
* @deprecated use {@link BlockFactory#newConstantBooleanBlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java index 1c3549c06ca87..651422f6716fb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanBlockBuilder.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.BitArray; import java.util.Arrays; @@ -179,6 +180,31 @@ public BooleanBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } + private BooleanBlock buildBigArraysBlock() { + final BooleanBlock theBlock; + final BitArray array = new BitArray(valueCount, blockFactory.bigArrays()); + for (int i = 0; i < valueCount; i++) { + if (values[i]) { + array.set(i); + } + } + if (isDense() && singleValued()) { + theBlock = new BooleanBigArrayVector(array, positionCount, blockFactory).asBlock(); + } else { + theBlock = new BooleanBigArrayBlock(array, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - array.ramBytesUsed(), false); + return theBlock; + } + @Override public BooleanBlock build() { try { @@ -187,20 +213,26 @@ public BooleanBlock build() { if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantBooleanBlockWith(values[0], 1, estimatedBytes); } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); + if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); } else { - theBlock = blockFactory.newBooleanArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + adjustBreaker(valueCount * elementSize()); + values = Arrays.copyOf(values, valueCount); + adjustBreaker(-values.length * elementSize()); + } + if (isDense() && singleValued()) { + theBlock = blockFactory.newBooleanArrayVector(values, positionCount, estimatedBytes).asBlock(); + } else { + theBlock = blockFactory.newBooleanArrayBlock( + values, + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); + } } } built(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java index ec4ab8f7def1c..7c86f40981ec7 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVector.java @@ -101,40 +101,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newBooleanVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBooleanVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newBooleanVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newBooleanVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder { + sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder, FixedBuilder { /** * Appends a boolean to the current entry. */ @@ -147,13 +117,11 @@ sealed interface Builder extends Vector.Builder permits BooleanVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits BooleanVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits BooleanVectorFixedBuilder { /** * Appends a boolean to the current entry. */ - FixedBuilder appendBoolean(boolean value); - @Override - BooleanVector build(); + FixedBuilder appendBoolean(boolean value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java index 3fa4a90a6e734..d707e3cf901c1 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BooleanVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a BooleanVector. + * Block view of a {@link BooleanVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. 
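
The interface change above makes FixedBuilder a subtype of Builder rather than a sibling, so code that only appends and builds can accept either variant. A compilable plain-Java model of the new hierarchy, with hypothetical class names:

```java
sealed interface Builder permits GrowingBuilder, FixedBuilder {
    Builder appendBoolean(boolean value);
}

sealed interface FixedBuilder extends Builder permits FixedBuilderImpl {
    @Override
    FixedBuilder appendBoolean(boolean value); // covariant return, as above
}

final class GrowingBuilder implements Builder {
    public Builder appendBoolean(boolean value) { return this; }
}

final class FixedBuilderImpl implements FixedBuilder {
    public FixedBuilder appendBoolean(boolean value) { return this; }
}
```
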
*/ public final class BooleanVectorBlock extends AbstractVectorBlock implements BooleanBlock { @@ -73,11 +73,6 @@ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java index 34d4e5aaa43e2..e2598d3d86b8f 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayBlock.java @@ -15,20 +15,17 @@ import java.util.BitSet; /** - * Block implementation that stores an array of BytesRef. + * Block implementation that stores values in a {@link BytesRefArrayVector}. + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ -public final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlock { +final class BytesRefArrayBlock extends AbstractArrayBlock implements BytesRefBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayBlock.class); - private final BytesRefArray values; + private final BytesRefArrayVector vector; - public BytesRefArrayBlock(BytesRefArray values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public BytesRefArrayBlock( + BytesRefArrayBlock( BytesRefArray values, int positionCount, int[] firstValueIndexes, @@ -37,7 +34,7 @@ public BytesRefArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new BytesRefArrayVector(values, (int) values.size(), blockFactory); } @Override @@ -47,7 +44,7 @@ public BytesRefVector asVector() { @Override public BytesRef getBytesRef(int valueIndex, BytesRef dest) { - return values.get(valueIndex, dest); + return vector.getBytesRef(valueIndex, dest); } @Override @@ -86,7 +83,7 @@ public BytesRefBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector final BytesRef scratch = new BytesRef(); try (var builder = blockFactory().newBytesRefBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { @@ -104,14 +101,13 @@ public BytesRefBlock expand() { } } - public static long ramBytesEstimated(BytesRefArray values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + 
vector.ramBytesUsed(); } @Override @@ -134,14 +130,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + values.size() + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); - Releasables.closeExpectNoException(values); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java index a8bb60f9f20fa..53e5ee61787c6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefArrayVector.java @@ -14,29 +14,23 @@ /** * Vector implementation that stores an array of BytesRef values. + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ -public final class BytesRefArrayVector extends AbstractVector implements BytesRefVector { +final class BytesRefArrayVector extends AbstractVector implements BytesRefVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(BytesRefArrayVector.class); private final BytesRefArray values; - private final BytesRefBlock block; - - public BytesRefArrayVector(BytesRefArray values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public BytesRefArrayVector(BytesRefArray values, int positionCount, BlockFactory blockFactory) { + BytesRefArrayVector(BytesRefArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new BytesRefVectorBlock(this); } @Override public BytesRefBlock asBlock() { - return block; + return new BytesRefVectorBlock(this); } @Override @@ -93,11 +87,9 @@ public String toString() { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BytesRefArray} is adjusted outside + // of this class. blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java index 50611f3e15130..0f121867a5347 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlock.java @@ -171,16 +171,6 @@ static int hash(BytesRefBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. 
- * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#newBytesRefBlockBuilder} @@ -190,16 +180,6 @@ static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newBytesRefBlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static BytesRefBlock newConstantBlockWith(BytesRef value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. * @deprecated use {@link BlockFactory#newConstantBytesRefBlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java index 70e20ac9f1d00..f2bed6e42a039 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefBlockBuilder.java @@ -190,40 +190,46 @@ public BytesRefBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } + private BytesRefBlock buildFromBytesArray() { + assert estimatedBytes == 0 || firstValueIndexes != null; + final BytesRefBlock theBlock; + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { + theBlock = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes, false); + Releasables.closeExpectNoException(values); + } else { + if (isDense() && singleValued()) { + theBlock = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); + } else { + theBlock = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
+ */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); + } + return theBlock; + } + @Override public BytesRefBlock build() { try { finish(); BytesRefBlock theBlock; - assert estimatedBytes == 0 || firstValueIndexes != null; - if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - theBlock = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); - /* - * Update the breaker with the actual bytes used. - * We pass false below even though we've used the bytes. That's weird, - * but if we break here we will throw away the used memory, letting - * it be deallocated. The exception will bubble up and the builder will - * still technically be open, meaning the calling code should close it - * which will return all used memory to the breaker. - */ - blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes, false); - Releasables.closeExpectNoException(values); - } else { - if (isDense() && singleValued()) { - theBlock = new BytesRefArrayVector(values, positionCount, blockFactory).asBlock(); - } else { - theBlock = new BytesRefArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); - } - /* - * Update the breaker with the actual bytes used. - * We pass false below even though we've used the bytes. That's weird, - * but if we break here we will throw away the used memory, letting - * it be deallocated. The exception will bubble up and the builder will - * still technically be open, meaning the calling code should close it - * which will return all used memory to the breaker. - */ - blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); - } + theBlock = buildFromBytesArray(); values = null; built(); return theBlock; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java index b7011666b981d..5c56ece72c298 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVector.java @@ -101,25 +101,6 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. - * @deprecated use {@link BlockFactory#newBytesRefVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newBytesRefVectorBuilder(estimatedSize); - } - /** * A builder that grows as needed. 
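The extracted buildFromBytesArray() above commits to one of three layouts. A condensed sketch of that decision; the holder names (LayoutChoice, Kind) are hypothetical, while the predicates and the three layouts are the ones in the patch:

    // Hypothetical names, for illustration only.
    final class LayoutChoice {
        enum Kind { CONSTANT_VECTOR, ARRAY_VECTOR, ARRAY_BLOCK }

        static Kind choose(boolean hasNonNullValue, int positionCount, int valueCount,
                           boolean dense, boolean singleValued) {
            if (hasNonNullValue && positionCount == 1 && valueCount == 1) {
                return Kind.CONSTANT_VECTOR;  // value deep-copied, backing array closed
            }
            if (dense && singleValued) {
                return Kind.ARRAY_VECTOR;     // no nulls, exactly one value per position
            }
            return Kind.ARRAY_BLOCK;          // nulls and/or multi-values present
        }
    }

Whatever the choice, the breaker is settled to the block's real footprint with adjustBreaker(..., false): passing false means a trip here does not leak, because the builder stays open and the caller's close() returns everything that was accounted.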
*/ diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java index 9d3f69bfaa981..92f93d5d23a49 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/BytesRefVectorBlock.java @@ -11,7 +11,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a BytesRefVector. + * Block view of a {@link BytesRefVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class BytesRefVectorBlock extends AbstractVectorBlock implements BytesRefBlock { @@ -74,11 +74,6 @@ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java index b636d89a206e0..16d70d1a0e800 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBooleanVector.java @@ -13,22 +13,15 @@ * Vector implementation that stores a constant boolean value. * This class is generated. Do not edit it. */ -public final class ConstantBooleanVector extends AbstractVector implements BooleanVector { +final class ConstantBooleanVector extends AbstractVector implements BooleanVector { static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantBooleanVector.class); private final boolean value; - private final BooleanBlock block; - - public ConstantBooleanVector(boolean value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public ConstantBooleanVector(boolean value, int positionCount, BlockFactory blockFactory) { + ConstantBooleanVector(boolean value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new BooleanVectorBlock(this); } @Override @@ -38,12 +31,12 @@ public boolean getBoolean(int position) { @Override public BooleanBlock asBlock() { - return block; + return new BooleanVectorBlock(this); } @Override public BooleanVector filter(int... 
positions) { - return new ConstantBooleanVector(value, positions.length); + return blockFactory().newConstantBooleanVector(value, positions.length); } @Override @@ -77,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java index be34db592b228..57ec1c945ade5 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantBytesRefVector.java @@ -14,22 +14,15 @@ * Vector implementation that stores a constant BytesRef value. * This class is generated. Do not edit it. */ -public final class ConstantBytesRefVector extends AbstractVector implements BytesRefVector { +final class ConstantBytesRefVector extends AbstractVector implements BytesRefVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantBytesRefVector.class) + RamUsageEstimator .shallowSizeOfInstance(BytesRef.class); private final BytesRef value; - private final BytesRefBlock block; - - public ConstantBytesRefVector(BytesRef value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public ConstantBytesRefVector(BytesRef value, int positionCount, BlockFactory blockFactory) { + ConstantBytesRefVector(BytesRef value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new BytesRefVectorBlock(this); } @Override @@ -39,12 +32,12 @@ public BytesRef getBytesRef(int position, BytesRef ignore) { @Override public BytesRefBlock asBlock() { - return block; + return new BytesRefVectorBlock(this); } @Override public BytesRefVector filter(int... positions) { - return new ConstantBytesRefVector(value, positions.length); + return blockFactory().newConstantBytesRefVector(value, positions.length); } @Override @@ -82,13 +75,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java index f6cce49aa3d42..a783f0243313e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantDoubleVector.java @@ -13,22 +13,15 @@ * Vector implementation that stores a constant double value. * This class is generated. Do not edit it. 
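Note what filter() needs on a constant vector: only the count of selected positions, since every position holds the same value. A usage sketch; blockFactory is assumed from the surrounding context, the factory method and accessors appear in this patch:

    try (BooleanVector ones = blockFactory.newConstantBooleanVector(true, 5)) {
        // Which positions are picked is irrelevant; only how many survives.
        try (BooleanVector filtered = ones.filter(0, 2, 4)) {
            assert filtered.getPositionCount() == 3;
            assert filtered.getBoolean(1);
        }
    }

Routing the result through blockFactory() instead of the removed bare constructor keeps the filtered copy on the same breaker as its source.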
*/ -public final class ConstantDoubleVector extends AbstractVector implements DoubleVector { +final class ConstantDoubleVector extends AbstractVector implements DoubleVector { static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantDoubleVector.class); private final double value; - private final DoubleBlock block; - - public ConstantDoubleVector(double value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public ConstantDoubleVector(double value, int positionCount, BlockFactory blockFactory) { + ConstantDoubleVector(double value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new DoubleVectorBlock(this); } @Override @@ -38,12 +31,12 @@ public double getDouble(int position) { @Override public DoubleBlock asBlock() { - return block; + return new DoubleVectorBlock(this); } @Override public DoubleVector filter(int... positions) { - return new ConstantDoubleVector(value, positions.length); + return blockFactory().newConstantDoubleVector(value, positions.length); } @Override @@ -77,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java index fa7b9223d5107..56573e985c387 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantIntVector.java @@ -13,22 +13,15 @@ * Vector implementation that stores a constant int value. * This class is generated. Do not edit it. */ -public final class ConstantIntVector extends AbstractVector implements IntVector { +final class ConstantIntVector extends AbstractVector implements IntVector { static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantIntVector.class); private final int value; - private final IntBlock block; - - public ConstantIntVector(int value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public ConstantIntVector(int value, int positionCount, BlockFactory blockFactory) { + ConstantIntVector(int value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new IntVectorBlock(this); } @Override @@ -38,12 +31,12 @@ public int getInt(int position) { @Override public IntBlock asBlock() { - return block; + return new IntVectorBlock(this); } @Override public IntVector filter(int... 
positions) { - return new ConstantIntVector(value, positions.length); + return blockFactory().newConstantIntVector(value, positions.length); } @Override @@ -77,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java index 21d4d81dfd193..0173f1c1d4d7a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/ConstantLongVector.java @@ -13,22 +13,15 @@ * Vector implementation that stores a constant long value. * This class is generated. Do not edit it. */ -public final class ConstantLongVector extends AbstractVector implements LongVector { +final class ConstantLongVector extends AbstractVector implements LongVector { static final long RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantLongVector.class); private final long value; - private final LongBlock block; - - public ConstantLongVector(long value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public ConstantLongVector(long value, int positionCount, BlockFactory blockFactory) { + ConstantLongVector(long value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new LongVectorBlock(this); } @Override @@ -38,12 +31,12 @@ public long getLong(int position) { @Override public LongBlock asBlock() { - return block; + return new LongVectorBlock(this); } @Override public LongVector filter(int... positions) { - return new ConstantLongVector(value, positions.length); + return blockFactory().newConstantLongVector(value, positions.length); } @Override @@ -77,13 +70,4 @@ public int hashCode() { public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java index db3546c73c054..96e96ac459a50 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayBlock.java @@ -8,25 +8,21 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of double. + * Block implementation that stores values in a {@link DoubleArrayVector}. * This class is generated. Do not edit it. 
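Every constant vector above loses the same hand-rolled close() override. The double-release guard now lives once in the shared base class, and subclasses only implement closeInternal(). A sketch of the shape, with hypothetical names; the real base class also reference-counts, which is omitted here:

    // Hypothetical condensed form of the lifecycle contract.
    abstract class RefGuardedResource implements AutoCloseable {
        private boolean released = false;

        @Override
        public final void close() {
            if (released) {
                throw new IllegalStateException("can't release already released resource [" + this + "]");
            }
            released = true;
            closeInternal();  // runs exactly once
        }

        // Subclasses only say what to give back: breaker bytes, wrapped arrays.
        protected abstract void closeInternal();
    }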
*/ -public final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { +final class DoubleArrayBlock extends AbstractArrayBlock implements DoubleBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayBlock.class); - private final double[] values; + private final DoubleArrayVector vector; - public DoubleArrayBlock(double[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public DoubleArrayBlock( + DoubleArrayBlock( double[] values, int positionCount, int[] firstValueIndexes, @@ -35,7 +31,7 @@ public DoubleArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new DoubleArrayVector(values, values.length, blockFactory); } @Override @@ -45,7 +41,7 @@ public DoubleVector asVector() { @Override public double getDouble(int valueIndex) { - return values[valueIndex]; + return vector.getDouble(valueIndex); } @Override @@ -83,7 +79,7 @@ public DoubleBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -100,14 +96,13 @@ public DoubleBlock expand() { } } - public static long ramBytesEstimated(double[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -130,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java index 08e51b0e313d8..bb6d9c22539a6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleArrayVector.java @@ -15,27 +15,20 @@ * Vector implementation that stores an array of double values. * This class is generated. Do not edit it. 
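With the block now delegating storage to a wrapped vector, memory is reported and released in two halves: the block shell and the vector. A sketch of the arithmetic; the byte counts are made-up stand-ins (real values come from RamUsageEstimator), the method names are from the patch:

    long blockShell = 40      // BASE_RAM_BYTES_USED: the block object itself
        + 0                   // firstValueIndexes: absent for single-valued data
        + 0;                  // nullsMask: absent when nothing is null
    long vectorBytes = 16 + 8L * 1024;  // vector shell plus a 1024-slot double[]

    long reported = blockShell + vectorBytes;  // ramBytesUsed() == shell + vector

    // closeInternal() mirrors the split: adjustBreaker(-blockShell, true) returns
    // the shell's bytes, then closing the vector returns its own share.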
*/ -public final class DoubleArrayVector extends AbstractVector implements DoubleVector { +final class DoubleArrayVector extends AbstractVector implements DoubleVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleArrayVector.class); private final double[] values; - private final DoubleBlock block; - - public DoubleArrayVector(double[] values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public DoubleArrayVector(double[] values, int positionCount, BlockFactory blockFactory) { + DoubleArrayVector(double[] values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new DoubleVectorBlock(this); } @Override public DoubleBlock asBlock() { - return block; + return new DoubleVectorBlock(this); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java new file mode 100644 index 0000000000000..5b1dcbfc9d728 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayBlock.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.DoubleArray; +import org.elasticsearch.core.Releasables; + +import java.util.BitSet; + +/** + * Block implementation that stores values in a {@link DoubleBigArrayVector}. Does not take ownership of the given + * {@link DoubleArray} and does not adjust circuit breakers to account for it. + * This class is generated. Do not edit it. + */ +public final class DoubleBigArrayBlock extends AbstractArrayBlock implements DoubleBlock { + + private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this + private final DoubleBigArrayVector vector; + + public DoubleBigArrayBlock( + DoubleArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); + this.vector = new DoubleBigArrayVector(values, (int) values.size(), blockFactory); + } + + @Override + public DoubleVector asVector() { + return null; + } + + @Override + public double getDouble(int valueIndex) { + return vector.getDouble(valueIndex); + } + + @Override + public DoubleBlock filter(int... 
positions) { + try (var builder = blockFactory().newDoubleBlockBuilder(positions.length)) { + for (int pos : positions) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int valueCount = getValueCount(pos); + int first = getFirstValueIndex(pos); + if (valueCount == 1) { + builder.appendDouble(getDouble(getFirstValueIndex(pos))); + } else { + builder.beginPositionEntry(); + for (int c = 0; c < valueCount; c++) { + builder.appendDouble(getDouble(first + c)); + } + builder.endPositionEntry(); + } + } + return builder.mvOrdering(mvOrdering()).build(); + } + } + + @Override + public ElementType elementType() { + return ElementType.DOUBLE; + } + + @Override + public DoubleBlock expand() { + if (firstValueIndexes == null) { + incRef(); + return this; + } + // TODO use reference counting to share the vector + try (var builder = blockFactory().newDoubleBlockBuilder(firstValueIndexes[getPositionCount()])) { + for (int pos = 0; pos < getPositionCount(); pos++) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int first = getFirstValueIndex(pos); + int end = first + getValueCount(pos); + for (int i = first; i < end; i++) { + builder.appendDouble(getDouble(i)); + } + } + return builder.mvOrdering(MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING).build(); + } + } + + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof DoubleBlock that) { + return DoubleBlock.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return DoubleBlock.hash(this); + } + + @Override + public String toString() { + return getClass().getSimpleName() + + "[positions=" + + getPositionCount() + + ", mvOrdering=" + + mvOrdering() + + ", ramBytesUsed=" + + vector.ramBytesUsed() + + ']'; + } + + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + + @Override + public void closeInternal() { + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java index 476b94ad3fa05..d6fab63a6b6ff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBigArrayVector.java @@ -12,30 +12,24 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed DoubleArray. + * Vector implementation that defers to an enclosed {@link DoubleArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
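The "does not adjust circuit breakers" wording above is the load-bearing contract: a DoubleArray handed to this vector was already charged to the BigArrays instance that allocated it, so the vector must not double-count it and, on close, only closes it. A usage sketch; bigArrays and blockFactory are assumed from the surrounding context, the calls are existing APIs:

    int positionCount = 4096;  // assumed
    DoubleArray array = bigArrays.newDoubleArray(positionCount, false);
    for (int i = 0; i < positionCount; i++) {
        array.set(i, 1.5 * i);  // bytes already tracked by bigArrays' breaker
    }
    try (DoubleBigArrayVector vector = new DoubleBigArrayVector(array, positionCount, blockFactory)) {
        double v = vector.getDouble(3);  // reads through to the DoubleArray
    }
    // Closing the vector closed `array`; its memory was released through
    // BigArrays, not through blockFactory's breaker.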
*/ public final class DoubleBigArrayVector extends AbstractVector implements DoubleVector, Releasable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(DoubleBigArrayVector.class); + private static final long BASE_RAM_BYTES_USED = 0; // FIXME private final DoubleArray values; - private final DoubleBlock block; - - public DoubleBigArrayVector(DoubleArray values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - public DoubleBigArrayVector(DoubleArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new DoubleVectorBlock(this); } @Override public DoubleBlock asBlock() { - return block; + return new DoubleVectorBlock(this); } @Override @@ -69,11 +63,9 @@ public DoubleVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link DoubleArray} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java index 31d0000d28515..a4ce5e69534bb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlock.java @@ -18,7 +18,7 @@ * Block that stores double values. * This class is generated. Do not edit it. */ -public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock, ConstantNullBlock { +public sealed interface DoubleBlock extends Block permits DoubleArrayBlock, DoubleVectorBlock, ConstantNullBlock, DoubleBigArrayBlock { /** * Retrieves the double value stored at the given value index. @@ -167,16 +167,6 @@ static int hash(DoubleBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#newDoubleBlockBuilder} @@ -186,16 +176,6 @@ static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newDoubleBlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static DoubleBlock newConstantBlockWith(double value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. 
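Note the asymmetry in the DoubleBlock deletions above: only the overloads that hid a factory are removed outright, while the factory-taking overloads survive as deprecated bridges. A migration sketch; blockFactory and the size are assumptions, the rest appears in this patch:

    int estimatedSize = 256;  // assumed
    // Removed by this patch (relied on the hidden non-breaking factory):
    //   DoubleBlock.newBlockBuilder(estimatedSize)
    //   DoubleBlock.newConstantBlockWith(1.0, positions)
    // Preferred end state, with the factory explicit:
    try (DoubleBlock.Builder builder = blockFactory.newDoubleBlockBuilder(estimatedSize)) {
        builder.appendDouble(1.0);
        builder.appendNull();  // blocks, unlike vectors, may hold nulls
        try (DoubleBlock block = builder.build()) {
            // block bytes are tracked by blockFactory's breaker
        }
    }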
* @deprecated use {@link BlockFactory#newConstantDoubleBlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java index 7781e4c353e8e..4e0fa1180a2ff 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleBlockBuilder.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.DoubleArray; import java.util.Arrays; @@ -179,6 +180,29 @@ public DoubleBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } + private DoubleBlock buildBigArraysBlock() { + final DoubleBlock theBlock; + final DoubleArray array = blockFactory.bigArrays().newDoubleArray(valueCount, false); + for (int i = 0; i < valueCount; i++) { + array.set(i, values[i]); + } + if (isDense() && singleValued()) { + theBlock = new DoubleBigArrayVector(array, positionCount, blockFactory).asBlock(); + } else { + theBlock = new DoubleBigArrayBlock(array, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - array.ramBytesUsed(), false); + return theBlock; + } + @Override public DoubleBlock build() { try { @@ -187,20 +211,26 @@ public DoubleBlock build() { if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantDoubleBlockWith(values[0], 1, estimatedBytes); } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); + if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); } else { - theBlock = blockFactory.newDoubleArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + adjustBreaker(valueCount * elementSize()); + values = Arrays.copyOf(values, valueCount); + adjustBreaker(-values.length * elementSize()); + } + if (isDense() && singleValued()) { + theBlock = blockFactory.newDoubleArrayVector(values, positionCount, estimatedBytes).asBlock(); + } else { + theBlock = blockFactory.newDoubleArrayBlock( + values, + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); + } } } built(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java index acabd0deb17f6..f54044874acdd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVector.java @@ -102,40 +102,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newDoubleVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newDoubleVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newDoubleVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newDoubleVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder { + sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder, FixedBuilder { /** * Appends a double to the current entry. */ @@ -148,13 +118,11 @@ sealed interface Builder extends Vector.Builder permits DoubleVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits DoubleVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits DoubleVectorFixedBuilder { /** * Appends a double to the current entry. */ - FixedBuilder appendDouble(double value); - @Override - DoubleVector build(); + FixedBuilder appendDouble(double value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java index b23a448c58336..2aa8e07c25604 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/DoubleVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a DoubleVector. + * Block view of a {@link DoubleVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. 
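The rewritten DoubleBlockBuilder.build() above gains a size escape hatch: past a threshold, values leave the plain double[] world for a BigArrays-backed block. The decision itself is one comparison; a sketch with a hypothetical holder class, using names from the patch:

    // SizeGate is hypothetical; the comparison is the one in build().
    final class SizeGate {
        static boolean useBigArrays(long estimatedBytes, long maxPrimitiveArrayBytes) {
            return estimatedBytes > maxPrimitiveArrayBytes;  // blockFactory.maxPrimitiveArrayBytes()
        }
    }

On the big-array path, values are copied into a DoubleArray from blockFactory.bigArrays() and the breaker settlement subtracts array.ramBytesUsed(), since BigArrays already tracks those bytes itself. On the primitive path, the matching adjustBreaker() calls around Arrays.copyOf() account for the temporary second array while both copies are live.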
*/ public final class DoubleVectorBlock extends AbstractVectorBlock implements DoubleBlock { @@ -73,11 +73,6 @@ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java index 111fc0c757af1..e8f10ced11adc 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayBlock.java @@ -8,25 +8,21 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of int. + * Block implementation that stores values in a {@link IntArrayVector}. * This class is generated. Do not edit it. */ -public final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { +final class IntArrayBlock extends AbstractArrayBlock implements IntBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayBlock.class); - private final int[] values; + private final IntArrayVector vector; - public IntArrayBlock(int[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public IntArrayBlock( + IntArrayBlock( int[] values, int positionCount, int[] firstValueIndexes, @@ -35,7 +31,7 @@ public IntArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new IntArrayVector(values, values.length, blockFactory); } @Override @@ -45,7 +41,7 @@ public IntVector asVector() { @Override public int getInt(int valueIndex) { - return values[valueIndex]; + return vector.getInt(valueIndex); } @Override @@ -83,7 +79,7 @@ public IntBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -100,14 +96,13 @@ public IntBlock expand() { } } - public static long ramBytesEstimated(int[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -130,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + 
Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java index 9c8c27efa0806..0576b77a0d700 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntArrayVector.java @@ -15,27 +15,20 @@ * Vector implementation that stores an array of int values. * This class is generated. Do not edit it. */ -public final class IntArrayVector extends AbstractVector implements IntVector { +final class IntArrayVector extends AbstractVector implements IntVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntArrayVector.class); private final int[] values; - private final IntBlock block; - - public IntArrayVector(int[] values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public IntArrayVector(int[] values, int positionCount, BlockFactory blockFactory) { + IntArrayVector(int[] values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new IntVectorBlock(this); } @Override public IntBlock asBlock() { - return block; + return new IntVectorBlock(this); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java new file mode 100644 index 0000000000000..ad6033fb452a0 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayBlock.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.IntArray; +import org.elasticsearch.core.Releasables; + +import java.util.BitSet; + +/** + * Block implementation that stores values in a {@link IntBigArrayVector}. Does not take ownership of the given + * {@link IntArray} and does not adjust circuit breakers to account for it. + * This class is generated. Do not edit it. 
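IntArrayBlock above picks up the same allowPassingToDifferentDriver() override as its siblings: the block marks itself and then its wrapped vector, because both carry per-driver breaker accounting and both must be handed over before a page crosses drivers. A self-contained sketch of the delegation shape, with hypothetical names:

    // Hypothetical types illustrating the forwarding pattern from the patch.
    interface DriverAware {
        void allowPassingToDifferentDriver();
    }

    final class WrappingBlock implements DriverAware {
        private final DriverAware vector;  // the storage this block delegates to

        WrappingBlock(DriverAware vector) {
            this.vector = vector;
        }

        @Override
        public void allowPassingToDifferentDriver() {
            // In the patch, super.allowPassingToDifferentDriver() marks the
            // block shell first; then the wrapped vector is marked too.
            vector.allowPassingToDifferentDriver();
        }
    }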
+ */ +public final class IntBigArrayBlock extends AbstractArrayBlock implements IntBlock { + + private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this + private final IntBigArrayVector vector; + + public IntBigArrayBlock( + IntArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); + this.vector = new IntBigArrayVector(values, (int) values.size(), blockFactory); + } + + @Override + public IntVector asVector() { + return null; + } + + @Override + public int getInt(int valueIndex) { + return vector.getInt(valueIndex); + } + + @Override + public IntBlock filter(int... positions) { + try (var builder = blockFactory().newIntBlockBuilder(positions.length)) { + for (int pos : positions) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int valueCount = getValueCount(pos); + int first = getFirstValueIndex(pos); + if (valueCount == 1) { + builder.appendInt(getInt(getFirstValueIndex(pos))); + } else { + builder.beginPositionEntry(); + for (int c = 0; c < valueCount; c++) { + builder.appendInt(getInt(first + c)); + } + builder.endPositionEntry(); + } + } + return builder.mvOrdering(mvOrdering()).build(); + } + } + + @Override + public ElementType elementType() { + return ElementType.INT; + } + + @Override + public IntBlock expand() { + if (firstValueIndexes == null) { + incRef(); + return this; + } + // TODO use reference counting to share the vector + try (var builder = blockFactory().newIntBlockBuilder(firstValueIndexes[getPositionCount()])) { + for (int pos = 0; pos < getPositionCount(); pos++) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int first = getFirstValueIndex(pos); + int end = first + getValueCount(pos); + for (int i = first; i < end; i++) { + builder.appendInt(getInt(i)); + } + } + return builder.mvOrdering(MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING).build(); + } + } + + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof IntBlock that) { + return IntBlock.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return IntBlock.hash(this); + } + + @Override + public String toString() { + return getClass().getSimpleName() + + "[positions=" + + getPositionCount() + + ", mvOrdering=" + + mvOrdering() + + ", ramBytesUsed=" + + vector.ramBytesUsed() + + ']'; + } + + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + + @Override + public void closeInternal() { + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java index 76d2797f2a64b..c1799c06713e9 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBigArrayVector.java @@ -12,30 
+12,24 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed IntArray. + * Vector implementation that defers to an enclosed {@link IntArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class IntBigArrayVector extends AbstractVector implements IntVector, Releasable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(IntBigArrayVector.class); + private static final long BASE_RAM_BYTES_USED = 0; // FIXME private final IntArray values; - private final IntBlock block; - - public IntBigArrayVector(IntArray values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - public IntBigArrayVector(IntArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new IntVectorBlock(this); } @Override public IntBlock asBlock() { - return block; + return new IntVectorBlock(this); } @Override @@ -69,11 +63,9 @@ public IntVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link IntArray} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java index 3909d2b6761be..7ed14e7a55105 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlock.java @@ -18,7 +18,7 @@ * Block that stores int values. * This class is generated. Do not edit it. */ -public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock { +public sealed interface IntBlock extends Block permits IntArrayBlock, IntVectorBlock, ConstantNullBlock, IntBigArrayBlock { /** * Retrieves the int value stored at the given value index. @@ -166,16 +166,6 @@ static int hash(IntBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newIntBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#newIntBlockBuilder} @@ -185,16 +175,6 @@ static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newIntBlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantIntBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static IntBlock newConstantBlockWith(int value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. 
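Adding IntBigArrayBlock means widening IntBlock's sealed permits clause, as shown above, and the payoff is compile-time exhaustiveness: every pattern switch over the sealed type must now handle the newcomer. A toy illustration of that property (Java 21 pattern switch; all names hypothetical):

    sealed interface Shape permits Circle, Square, Hexagon {}
    record Circle() implements Shape {}
    record Square() implements Shape {}
    record Hexagon() implements Shape {}  // the newly permitted implementation

    final class Shapes {
        static String describe(Shape s) {
            // With Hexagon newly permitted, this switch stops compiling until a
            // Hexagon arm (or a default) exists; the widened permits clause buys
            // IntBlock's consumers the same safety net.
            return switch (s) {
                case Circle c -> "circle";
                case Square sq -> "square";
                case Hexagon h -> "hexagon";
            };
        }
    }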
* @deprecated use {@link BlockFactory#newConstantIntBlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java index 49c3b156ce44b..5f67c0683a5d7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntBlockBuilder.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.IntArray; import java.util.Arrays; @@ -179,6 +180,29 @@ public IntBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } + private IntBlock buildBigArraysBlock() { + final IntBlock theBlock; + final IntArray array = blockFactory.bigArrays().newIntArray(valueCount, false); + for (int i = 0; i < valueCount; i++) { + array.set(i, values[i]); + } + if (isDense() && singleValued()) { + theBlock = new IntBigArrayVector(array, positionCount, blockFactory).asBlock(); + } else { + theBlock = new IntBigArrayBlock(array, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - array.ramBytesUsed(), false); + return theBlock; + } + @Override public IntBlock build() { try { @@ -187,20 +211,26 @@ public IntBlock build() { if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantIntBlockWith(values[0], 1, estimatedBytes); } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); + if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); } else { - theBlock = blockFactory.newIntArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + adjustBreaker(valueCount * elementSize()); + values = Arrays.copyOf(values, valueCount); + adjustBreaker(-values.length * elementSize()); + } + if (isDense() && singleValued()) { + theBlock = blockFactory.newIntArrayVector(values, positionCount, estimatedBytes).asBlock(); + } else { + theBlock = blockFactory.newIntArrayBlock( + values, + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); + } } } built(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java index 645288565c431..bc7e3c87ec33d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVector.java @@ -101,36 +101,6 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newIntVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newIntVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newIntVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newIntVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newIntVectorFixedBuilder(size); - } - /** Create a vector for a range of ints. */ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockFactory) { int[] values = new int[endExclusive - startInclusive]; @@ -143,7 +113,7 @@ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockF /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits IntVectorBuilder { + sealed interface Builder extends Vector.Builder permits IntVectorBuilder, FixedBuilder { /** * Appends a int to the current entry. */ @@ -156,13 +126,11 @@ sealed interface Builder extends Vector.Builder permits IntVectorBuilder { /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits IntVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits IntVectorFixedBuilder { /** * Appends a int to the current entry. */ - FixedBuilder appendInt(int value); - @Override - IntVector build(); + FixedBuilder appendInt(int value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java index 028ef35577753..97a4a48533e3a 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/IntVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a IntVector. + * Block view of a {@link IntVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. 
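 * A vector is dense by construction, holding exactly one value per position, which is
 * why this view can represent neither nulls nor multi-values.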
*/ public final class IntVectorBlock extends AbstractVectorBlock implements IntBlock { @@ -73,11 +73,6 @@ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java index 9e0fa9bcc2993..792f9b267e748 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayBlock.java @@ -8,25 +8,21 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; import java.util.BitSet; /** - * Block implementation that stores an array of long. + * Block implementation that stores values in a {@link LongArrayVector}. * This class is generated. Do not edit it. */ -public final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { +final class LongArrayBlock extends AbstractArrayBlock implements LongBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayBlock.class); - private final long[] values; + private final LongArrayVector vector; - public LongArrayBlock(long[] values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public LongArrayBlock( + LongArrayBlock( long[] values, int positionCount, int[] firstValueIndexes, @@ -35,7 +31,7 @@ public LongArrayBlock( BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + this.vector = new LongArrayVector(values, values.length, blockFactory); } @Override @@ -45,7 +41,7 @@ public LongVector asVector() { @Override public long getLong(int valueIndex) { - return values[valueIndex]; + return vector.getLong(valueIndex); } @Override @@ -83,7 +79,7 @@ public LongBlock expand() { incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { for (int pos = 0; pos < getPositionCount(); pos++) { if (isNull(pos)) { @@ -100,14 +96,13 @@ public LongBlock expand() { } } - public static long ramBytesEstimated(long[] values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } @Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -130,13 +125,20 @@ public String toString() { + getPositionCount() + ", mvOrdering=" + mvOrdering() - + ", values=" - + 
Arrays.toString(values) + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - blockFactory().adjustBreaker(-ramBytesUsed(), true); + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java index 0a3ada321d94c..3c5f6b7448321 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongArrayVector.java @@ -15,27 +15,20 @@ * Vector implementation that stores an array of long values. * This class is generated. Do not edit it. */ -public final class LongArrayVector extends AbstractVector implements LongVector { +final class LongArrayVector extends AbstractVector implements LongVector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongArrayVector.class); private final long[] values; - private final LongBlock block; - - public LongArrayVector(long[] values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public LongArrayVector(long[] values, int positionCount, BlockFactory blockFactory) { + LongArrayVector(long[] values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new LongVectorBlock(this); } @Override public LongBlock asBlock() { - return block; + return new LongVectorBlock(this); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java new file mode 100644 index 0000000000000..dc19a4038a9e9 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayBlock.java @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.LongArray; +import org.elasticsearch.core.Releasables; + +import java.util.BitSet; + +/** + * Block implementation that stores values in a {@link LongBigArrayVector}. Does not take ownership of the given + * {@link LongArray} and does not adjust circuit breakers to account for it. + * This class is generated. Do not edit it. 
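+ * Breaker bytes for the wrapped {@link LongArray} are adjusted by whoever created it
+ * (typically a block builder); this block only accounts for its own on-heap overhead,
+ * see {@code ramBytesUsedOnlyBlock()}.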
+ */ +public final class LongBigArrayBlock extends AbstractArrayBlock implements LongBlock { + + private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this + private final LongBigArrayVector vector; + + public LongBigArrayBlock( + LongArray values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); + this.vector = new LongBigArrayVector(values, (int) values.size(), blockFactory); + } + + @Override + public LongVector asVector() { + return null; + } + + @Override + public long getLong(int valueIndex) { + return vector.getLong(valueIndex); + } + + @Override + public LongBlock filter(int... positions) { + try (var builder = blockFactory().newLongBlockBuilder(positions.length)) { + for (int pos : positions) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int valueCount = getValueCount(pos); + int first = getFirstValueIndex(pos); + if (valueCount == 1) { + builder.appendLong(getLong(getFirstValueIndex(pos))); + } else { + builder.beginPositionEntry(); + for (int c = 0; c < valueCount; c++) { + builder.appendLong(getLong(first + c)); + } + builder.endPositionEntry(); + } + } + return builder.mvOrdering(mvOrdering()).build(); + } + } + + @Override + public ElementType elementType() { + return ElementType.LONG; + } + + @Override + public LongBlock expand() { + if (firstValueIndexes == null) { + incRef(); + return this; + } + // TODO use reference counting to share the vector + try (var builder = blockFactory().newLongBlockBuilder(firstValueIndexes[getPositionCount()])) { + for (int pos = 0; pos < getPositionCount(); pos++) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int first = getFirstValueIndex(pos); + int end = first + getValueCount(pos); + for (int i = first; i < end; i++) { + builder.appendLong(getLong(i)); + } + } + return builder.mvOrdering(MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING).build(); + } + } + + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof LongBlock that) { + return LongBlock.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return LongBlock.hash(this); + } + + @Override + public String toString() { + return getClass().getSimpleName() + + "[positions=" + + getPositionCount() + + ", mvOrdering=" + + mvOrdering() + + ", ramBytesUsed=" + + vector.ramBytesUsed() + + ']'; + } + + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + + @Override + public void closeInternal() { + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java index 2101b606e9a90..8c9f908e65368 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBigArrayVector.java @@ -12,30 +12,24 @@ import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed LongArray. + * Vector implementation that defers to an enclosed {@link LongArray}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. */ public final class LongBigArrayVector extends AbstractVector implements LongVector, Releasable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(LongBigArrayVector.class); + private static final long BASE_RAM_BYTES_USED = 0; // FIXME private final LongArray values; - private final LongBlock block; - - public LongBigArrayVector(LongArray values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - public LongBigArrayVector(LongArray values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new LongVectorBlock(this); } @Override public LongBlock asBlock() { - return block; + return new LongVectorBlock(this); } @Override @@ -69,11 +63,9 @@ public LongVector filter(int... positions) { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link LongArray} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java index 41ac8f7237f64..a56019faa8e31 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlock.java @@ -18,7 +18,7 @@ * Block that stores long values. * This class is generated. Do not edit it. */ -public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock, ConstantNullBlock { +public sealed interface LongBlock extends Block permits LongArrayBlock, LongVectorBlock, ConstantNullBlock, LongBigArrayBlock { /** * Retrieves the long value stored at the given value index. @@ -167,16 +167,6 @@ static int hash(LongBlock block) { return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newLongBlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#newLongBlockBuilder} @@ -186,16 +176,6 @@ static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { return blockFactory.newLongBlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. 
- * @deprecated use {@link BlockFactory#newConstantLongBlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static LongBlock newConstantBlockWith(long value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. * @deprecated use {@link BlockFactory#newConstantLongBlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java index 1692c4cff6a57..4f8c1658c0973 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongBlockBuilder.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.LongArray; import java.util.Arrays; @@ -179,6 +180,29 @@ public LongBlockBuilder mvOrdering(Block.MvOrdering mvOrdering) { return this; } + private LongBlock buildBigArraysBlock() { + final LongBlock theBlock; + final LongArray array = blockFactory.bigArrays().newLongArray(valueCount, false); + for (int i = 0; i < valueCount; i++) { + array.set(i, values[i]); + } + if (isDense() && singleValued()) { + theBlock = new LongBigArrayVector(array, positionCount, blockFactory).asBlock(); + } else { + theBlock = new LongBigArrayBlock(array, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. 
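+ *
+ * The delta passed below works out to "bytes the finished block reports, minus what
+ * is already tracked elsewhere": estimatedBytes was reserved by this builder for the
+ * primitive values[] array, and array.ramBytesUsed() was charged against the breaker
+ * when BigArrays allocated the LongArray.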
+ */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - array.ramBytesUsed(), false); + return theBlock; + } + @Override public LongBlock build() { try { @@ -187,20 +211,26 @@ public LongBlock build() { if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstantLongBlockWith(values[0], 1, estimatedBytes); } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); + if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); } else { - theBlock = blockFactory.newLongArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + adjustBreaker(valueCount * elementSize()); + values = Arrays.copyOf(values, valueCount); + adjustBreaker(-values.length * elementSize()); + } + if (isDense() && singleValued()) { + theBlock = blockFactory.newLongArrayVector(values, positionCount, estimatedBytes).asBlock(); + } else { + theBlock = blockFactory.newLongArrayBlock( + values, + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); + } } } built(); diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java index a312d7aeab0cc..358f5b32366cb 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVector.java @@ -102,40 +102,10 @@ default void writeTo(StreamOutput out) throws IOException { } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#newLongVectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newLongVectorBuilder} - */ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.newLongVectorBuilder(estimatedSize); - } - - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#newLongVectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.newLongVectorFixedBuilder(size); - } - /** * A builder that grows as needed. */ - sealed interface Builder extends Vector.Builder permits LongVectorBuilder { + sealed interface Builder extends Vector.Builder permits LongVectorBuilder, FixedBuilder { /** * Appends a long to the current entry. */ @@ -148,13 +118,11 @@ sealed interface Builder extends Vector.Builder permits LongVectorBuilder { /** * A builder that never grows. 
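 * As of this change {@code FixedBuilder} extends {@code Builder}, so a fixed-size
 * builder can be passed to any code written against the growable interface; only the
 * covariant {@code appendLong} override remains declared here.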
*/ - sealed interface FixedBuilder extends Vector.Builder permits LongVectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits LongVectorFixedBuilder { /** * Appends a long to the current entry. */ - FixedBuilder appendLong(long value); - @Override - LongVector build(); + FixedBuilder appendLong(long value); } } diff --git a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java index 589a9341188fc..1f4565fec5a8d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java +++ b/x-pack/plugin/esql/compute/src/main/generated-src/org/elasticsearch/compute/data/LongVectorBlock.java @@ -10,7 +10,7 @@ import org.elasticsearch.core.Releasables; /** - * Block view of a LongVector. + * Block view of a {@link LongVector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class LongVectorBlock extends AbstractVectorBlock implements LongBlock { @@ -73,11 +73,6 @@ public String toString() { return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java index dd5450d3b460c..e9b4498d50265 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregatorFunction.java @@ -86,14 +86,18 @@ private void addRawBlock(BooleanBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block fbitUncast = page.getBlock(channels.get(0)); + if (fbitUncast.areAllValuesNull()) { return; } - BooleanVector fbit = page.getBlock(channels.get(0)).asVector(); - BooleanVector tbit = page.getBlock(channels.get(1)).asVector(); + BooleanVector fbit = ((BooleanBlock) fbitUncast).asVector(); assert fbit.getPositionCount() == 1; - assert fbit.getPositionCount() == tbit.getPositionCount(); + Block tbitUncast = page.getBlock(channels.get(1)); + if (tbitUncast.areAllValuesNull()) { + return; + } + BooleanVector tbit = ((BooleanBlock) tbitUncast).asVector(); + assert tbit.getPositionCount() == 1; CountDistinctBooleanAggregator.combineIntermediate(state, fbit.getBoolean(0), tbit.getBoolean(0)); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java index fd770678d5943..3591dbeb41ffa 100644 --- 
a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(BytesRefBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctBytesRefAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java index a8169b5a901e1..38d4c7250debe 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctDoubleAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java index 9f685f4672939..d4bc68500745e 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctIntAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctIntAggregator.combineIntermediate(state, 
hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java index 55b396aa627d5..06c6f67b356e0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/CountDistinctLongAggregatorFunction.java @@ -95,11 +95,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block hllUncast = page.getBlock(channels.get(0)); + if (hllUncast.areAllValuesNull()) { return; } - BytesRefVector hll = page.getBlock(channels.get(0)).asVector(); + BytesRefVector hll = ((BytesRefBlock) hllUncast).asVector(); assert hll.getPositionCount() == 1; BytesRef scratch = new BytesRef(); CountDistinctLongAggregator.combineIntermediate(state, hll.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java index 6929900c29ea1..ae6a81244c7dd 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxDoubleAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - DoubleVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + DoubleVector max = ((DoubleBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.doubleValue(MaxDoubleAggregator.combine(state.doubleValue(), max.getDouble(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java index 1759442fbb12a..cf12c29df273d 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxIntAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { 
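        // Note the regenerated shape used across these aggregators: each intermediate
        // channel below is fetched, null-checked, and cast on its own, so a page whose
        // "seen" block is all-null is no longer blindly cast to a vector.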
assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - IntVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + IntVector max = ((IntBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.intValue(MaxIntAggregator.combine(state.intValue(), max.getInt(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java index fe7d797faf10a..a3cee0e24f687 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MaxLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block maxUncast = page.getBlock(channels.get(0)); + if (maxUncast.areAllValuesNull()) { return; } - LongVector max = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector max = ((LongBlock) maxUncast).asVector(); assert max.getPositionCount() == 1; - assert max.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(MaxLongAggregator.combine(state.longValue(), max.getLong(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java index a2e8d8fbf592c..4bcf08ce0fa35 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) 
{ return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java index 21e99587a5d09..db9dbdab52244 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java index 8c3aa95864aff..bf5fd51d7ed17 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregatorFunction.java @@ -88,11 +88,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); MedianAbsoluteDeviationLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java index 1f9a8fb49fb2d..7395a8c16f084 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinDoubleAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - DoubleVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + DoubleVector min = ((DoubleBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.doubleValue(MinDoubleAggregator.combine(state.doubleValue(), min.getDouble(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java index bbeba4c8374ab..d8db488b9ccb6 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinIntAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - IntVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + IntVector min = ((IntBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.intValue(MinIntAggregator.combine(state.intValue(), min.getInt(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java index 5299b505e124c..42ca9c7919ac0 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/MinLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = 
page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block minUncast = page.getBlock(channels.get(0)); + if (minUncast.areAllValuesNull()) { return; } - LongVector min = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector min = ((LongBlock) minUncast).asVector(); assert min.getPositionCount() == 1; - assert min.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(MinLongAggregator.combine(state.longValue(), min.getLong(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java index f7560379e476d..cd7a5b5974442 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileDoubleAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileDoubleAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java index d45ba7a1e350a..b9b1c2e90b768 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileIntAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileIntAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java index dac045d814926..cc785ce55bb55 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/PercentileLongAggregatorFunction.java @@ -91,11 +91,11 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block quartUncast = page.getBlock(channels.get(0)); + if (quartUncast.areAllValuesNull()) { return; } - BytesRefVector quart = page.getBlock(channels.get(0)).asVector(); + BytesRefVector quart = ((BytesRefBlock) quartUncast).asVector(); assert quart.getPositionCount() == 1; BytesRef scratch = new BytesRef(); PercentileLongAggregator.combineIntermediate(state, quart.getBytesRef(0, scratch)); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java index 5520c587555b3..57f60b62a30d7 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumDoubleAggregatorFunction.java @@ -91,15 +91,24 @@ private void addRawBlock(DoubleBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block valueUncast = page.getBlock(channels.get(0)); + if (valueUncast.areAllValuesNull()) { return; } - DoubleVector value = page.getBlock(channels.get(0)).asVector(); - DoubleVector delta = page.getBlock(channels.get(1)).asVector(); - BooleanVector seen = page.getBlock(channels.get(2)).asVector(); + DoubleVector value = ((DoubleBlock) valueUncast).asVector(); assert value.getPositionCount() == 1; - assert value.getPositionCount() == delta.getPositionCount() && value.getPositionCount() == seen.getPositionCount(); + Block deltaUncast = page.getBlock(channels.get(1)); + if (deltaUncast.areAllValuesNull()) { + return; + } + DoubleVector delta = ((DoubleBlock) deltaUncast).asVector(); + assert delta.getPositionCount() == 1; + Block seenUncast = page.getBlock(channels.get(2)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; SumDoubleAggregator.combineIntermediate(state, value.getDouble(0), delta.getDouble(0), seen.getBoolean(0)); } diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java index 1225b90bf09f7..c0ca6d5ef6f96 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java +++ 
b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumIntAggregatorFunction.java @@ -92,14 +92,18 @@ private void addRawBlock(IntBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { return; } - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector sum = ((LongBlock) sumUncast).asVector(); assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(SumIntAggregator.combine(state.longValue(), sum.getLong(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java index 720e7ca9f3bbf..90fb9294a8a02 100644 --- a/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/generated/org/elasticsearch/compute/aggregation/SumLongAggregatorFunction.java @@ -90,14 +90,18 @@ private void addRawBlock(LongBlock block) { public void addIntermediateInput(Page page) { assert channels.size() == intermediateBlockCount(); assert page.getBlockCount() >= channels.get(0) + intermediateStateDesc().size(); - Block uncastBlock = page.getBlock(channels.get(0)); - if (uncastBlock.areAllValuesNull()) { + Block sumUncast = page.getBlock(channels.get(0)); + if (sumUncast.areAllValuesNull()) { return; } - LongVector sum = page.getBlock(channels.get(0)).asVector(); - BooleanVector seen = page.getBlock(channels.get(1)).asVector(); + LongVector sum = ((LongBlock) sumUncast).asVector(); assert sum.getPositionCount() == 1; - assert sum.getPositionCount() == seen.getPositionCount(); + Block seenUncast = page.getBlock(channels.get(1)); + if (seenUncast.areAllValuesNull()) { + return; + } + BooleanVector seen = ((BooleanBlock) seenUncast).asVector(); + assert seen.getPositionCount() == 1; if (seen.getBoolean(0)) { state.longValue(SumLongAggregator.combine(state.longValue(), sum.getLong(0))); state.seen(true); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java deleted file mode 100644 index 50a20ee6ee73d..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/OwningChannelActionListener.java +++ /dev/null @@ -1,44 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute; - -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.ChannelActionListener; -import org.elasticsearch.transport.TransportChannel; -import org.elasticsearch.transport.TransportResponse; - -/** - * Wraps a {@link ChannelActionListener} and takes ownership of responses passed to - * {@link org.elasticsearch.action.ActionListener#onResponse(Object)}; the reference count will be decreased once sending is done. - * - * Deprecated: use {@link ChannelActionListener} instead and ensure responses sent to it are properly closed after. - */ -@Deprecated(forRemoval = true) -public final class OwningChannelActionListener implements ActionListener { - private final ChannelActionListener listener; - - public OwningChannelActionListener(TransportChannel channel) { - this.listener = new ChannelActionListener<>(channel); - } - - @Override - public void onResponse(Response response) { - ActionListener.respondAndRelease(listener, response); - } - - @Override - public void onFailure(Exception e) { - listener.onFailure(e); - } - - @Override - public String toString() { - return "OwningChannelActionListener{" + listener + "}"; - } - -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java index d083a48fffb7a..c960847f559fb 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBooleanAggregator.java @@ -33,10 +33,6 @@ public static void combine(SingleState current, boolean v) { current.bits |= v ? 
BIT_TRUE : BIT_FALSE; } - public static void combineStates(SingleState current, SingleState state) { - current.bits |= state.bits; - } - public static void combineIntermediate(SingleState current, boolean fbit, boolean tbit) { if (fbit) current.bits |= BIT_FALSE; if (tbit) current.bits |= BIT_TRUE; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java index 89ad27f1fef28..a28a68276236a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctBytesRefAggregator.java @@ -29,10 +29,6 @@ public static void combine(HllStates.SingleState current, BytesRef v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java index 86b3f9997246e..5736907444117 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctDoubleAggregator.java @@ -29,10 +29,6 @@ public static void combine(HllStates.SingleState current, double v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java index 993284b0c57c3..2054344814919 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctIntAggregator.java @@ -29,10 +29,6 @@ public static void combine(HllStates.SingleState current, int v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java index a09c8df3b0fc3..88b5b9e90198e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountDistinctLongAggregator.java @@ -29,10 +29,6 @@ public static void 
combine(HllStates.SingleState current, long v) { current.collect(v); } - public static void combineStates(HllStates.SingleState current, HllStates.SingleState state) { - current.merge(0, state.hll, 0); - } - public static void combineIntermediate(HllStates.SingleState current, BytesRef inValue) { current.merge(0, inValue, 0); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java index 995dc5e15740f..5dba070172ae9 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/CountGroupingAggregatorFunction.java @@ -181,7 +181,7 @@ public void evaluateIntermediate(Block[] blocks, int offset, IntVector selected) @Override public void evaluateFinal(Block[] blocks, int offset, IntVector selected, DriverContext driverContext) { - try (LongVector.Builder builder = LongVector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.Builder builder = driverContext.blockFactory().newLongVectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { int si = selected.getInt(i); builder.appendLong(state.hasValue(si) ? state.get(si) : 0); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java index 2d73c323e9556..db0d57b887008 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationDoubleAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, double v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java index b4696f0ab1934..a57e28aebd437 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationIntAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, int v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java index bbd9f1821b681..54340f809e4cd 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/MedianAbsoluteDeviationLongAggregator.java @@ -32,10 +32,6 @@ public static void combineIntermediate(QuantileStates.SingleState state, BytesRe state.add(inValue); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static Block evaluateFinal(QuantileStates.SingleState state, DriverContext driverContext) { return state.evaluateMedianAbsoluteDeviation(driverContext); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java index 3020a920ebddb..1cff8d89b7541 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileDoubleAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, double v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java index 4ccd409cc8ccf..d93dc7099fffe 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileIntAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, int v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java index 2a0eb3a060930..9d900069d15ae 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/PercentileLongAggregator.java @@ -28,10 +28,6 @@ public static void combine(QuantileStates.SingleState current, long v) { current.add(v); } - public static void combineStates(QuantileStates.SingleState current, QuantileStates.SingleState state) { - current.add(state); - } - public static void combineIntermediate(QuantileStates.SingleState state, BytesRef inValue) { state.add(inValue); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java 
b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java index 0b5b89425ed46..0ba7afb0d5e68 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/QuantileStates.java @@ -72,10 +72,6 @@ void add(double v) { digest.add(v); } - void add(SingleState other) { - digest.add(other.digest); - } - void add(BytesRef other) { digest.add(deserializeDigest(other)); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java index 4c2c38da28b75..fcd972045d252 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/SumDoubleAggregator.java @@ -42,10 +42,6 @@ public static void combine(SumState current, double value, double delta) { current.add(value, delta); } - public static void combineStates(SumState current, SumState state) { - current.add(state.value(), state.delta()); - } - public static void combineIntermediate(SumState state, double inValue, double inDelta, boolean seen) { if (seen) { combine(state, inValue, inDelta); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st index 42f86580a228d..76499a1fa7151 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/X-ArrayState.java.st @@ -73,7 +73,7 @@ $endif$ Block toValuesBlock(org.elasticsearch.compute.data.IntVector selected, DriverContext driverContext) { if (false == trackingGroupIds()) { - try ($Type$Vector.Builder builder = $Type$Vector.newVectorBuilder(selected.getPositionCount(), driverContext.blockFactory())) { + try ($Type$Vector.Builder builder = driverContext.blockFactory().new$Type$VectorFixedBuilder(selected.getPositionCount())) { for (int i = 0; i < selected.getPositionCount(); i++) { builder.append$Type$(values.get(selected.getInt(i))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java index 684e6aec60b9e..aa7c737e331c7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BooleanBlockHash.java @@ -51,8 +51,8 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { addInput.add(0, groupIds); } } else { - try (IntBlock groupIds = add(booleanVector).asBlock()) { - addInput.add(0, groupIds.asVector()); + try (IntVector groupIds = add(booleanVector)) { + addInput.add(0, groupIds); } } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java index da2c85e532016..7ee8a7165aa17 100644 --- 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/BytesRefLongBlockHash.java @@ -18,7 +18,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.BytesRefVector; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.LongVector; @@ -72,7 +71,9 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { BytesRefVector vector1 = block1.asVector(); LongVector vector2 = block2.asVector(); if (vector1 != null && vector2 != null) { - addInput.add(0, add(vector1, vector2)); + try (IntVector ords = add(vector1, vector2)) { + addInput.add(0, ords); + } } else { try (AddWork work = new AddWork(block1, block2, addInput)) { work.add(); @@ -88,7 +89,7 @@ public IntVector add(BytesRefVector vector1, LongVector vector2) { long hash1 = hashOrdToGroup(bytesHash.add(vector1.getBytesRef(i, scratch))); ords[i] = Math.toIntExact(hashOrdToGroup(finalHash.add(hash1, vector2.getLong(i)))); } - return new IntArrayVector(ords, positions); + return blockFactory.newIntArrayVector(ords, positions); } private static final long[] EMPTY = new long[0]; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java index ce53f0bb8e7f4..49b16198a5d77 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/aggregation/blockhash/LongLongBlockHash.java @@ -65,7 +65,7 @@ public void add(Page page, GroupingAggregatorFunction.AddInput addInput) { private IntVector add(LongVector vector1, LongVector vector2) { int positions = vector1.getPositionCount(); - try (var builder = IntVector.newVectorFixedBuilder(positions, blockFactory)) { + try (var builder = blockFactory.newIntVectorFixedBuilder(positions)) { for (int i = 0; i < positions; i++) { builder.appendInt(Math.toIntExact(hashOrdToGroup(hash.add(vector1.getLong(i), vector2.getLong(i))))); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java index 177e3fb6798d1..8ce6ef9ab78ab 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractBlock.java @@ -11,8 +11,7 @@ import java.util.BitSet; -abstract class AbstractBlock implements Block { - private int references = 1; +abstract class AbstractBlock extends AbstractNonThreadSafeRefCounted implements Block { private final int positionCount; @Nullable @@ -101,55 +100,7 @@ public void allowPassingToDifferentDriver() { } @Override - public boolean isReleased() { + public final boolean isReleased() { return hasReferences() == false; } - - @Override - public final void incRef() { - if (isReleased()) { - throw new IllegalStateException("can't increase refCount on already released block [" + this + "]"); - } - references++; - } - - @Override - public final boolean 
tryIncRef() { - if (isReleased()) { - return false; - } - references++; - return true; - } - - @Override - public final boolean decRef() { - if (isReleased()) { - throw new IllegalStateException("can't release already released block [" + this + "]"); - } - - references--; - - if (references <= 0) { - closeInternal(); - return true; - } - return false; - } - - @Override - public final boolean hasReferences() { - return references >= 1; - } - - @Override - public final void close() { - decRef(); - } - - /** - * This is called when the number of references reaches zero. - * It must release any resources held by the block (adjusting circuit breakers if needed). - */ - protected abstract void closeInternal(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java new file mode 100644 index 0000000000000..2dfd8c3eca5ac --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractNonThreadSafeRefCounted.java @@ -0,0 +1,67 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.core.RefCounted; +import org.elasticsearch.core.Releasable; + +/** + * Releasable, non-threadsafe version of {@link org.elasticsearch.core.AbstractRefCounted}. + * Calls to {@link AbstractNonThreadSafeRefCounted#decRef()} and {@link AbstractNonThreadSafeRefCounted#close()} are equivalent. + */ +abstract class AbstractNonThreadSafeRefCounted implements RefCounted, Releasable { + private int references = 1; + + @Override + public final void incRef() { + if (hasReferences() == false) { + throw new IllegalStateException("can't increase refCount on already released object [" + this + "]"); + } + references++; + } + + @Override + public final boolean tryIncRef() { + if (hasReferences() == false) { + return false; + } + references++; + return true; + } + + @Override + public final boolean decRef() { + if (hasReferences() == false) { + throw new IllegalStateException("can't release already released object [" + this + "]"); + } + + references--; + + if (references <= 0) { + closeInternal(); + return true; + } + return false; + } + + @Override + public final boolean hasReferences() { + return references >= 1; + } + + @Override + public final void close() { + decRef(); + } + + /** + * This is called when the number of references reaches zero. + * This is where resources should be released (adjusting circuit breakers if needed). + */ + protected abstract void closeInternal(); +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java index 33ef14cfb4ad8..cc9727b751411 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/AbstractVector.java @@ -10,11 +10,10 @@ /** * A dense Vector of single values. 
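The new AbstractNonThreadSafeRefCounted base class above centralizes the lifecycle that AbstractBlock previously hand-rolled: an object starts with one reference, close() is an alias for decRef(), and closeInternal() fires exactly once when the count reaches zero. A minimal sketch of that contract, assuming same-package access (the base class is package-private) and a hypothetical TrackedBytes subclass:

package org.elasticsearch.compute.data;

// Hypothetical subclass, for illustration only; not part of the change above.
final class TrackedBytes extends AbstractNonThreadSafeRefCounted {
    private final long bytes;

    TrackedBytes(long bytes) {
        this.bytes = bytes; // constructed with one live reference
    }

    @Override
    protected void closeInternal() {
        // runs exactly once, when the last reference is dropped
        System.out.println("released " + bytes + " bytes");
    }

    public static void main(String[] args) {
        TrackedBytes t = new TrackedBytes(1024);
        t.incRef();                        // count: 2
        t.close();                         // same as decRef(); count: 1
        t.close();                         // count: 0 -> closeInternal() runs
        System.out.println(t.tryIncRef()); // false: already released
        // a further incRef() or close() here would throw IllegalStateException
    }
}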
*/ -abstract class AbstractVector implements Vector { +abstract class AbstractVector extends AbstractNonThreadSafeRefCounted implements Vector { private final int positionCount; private BlockFactory blockFactory; - protected boolean released; protected AbstractVector(int positionCount, BlockFactory blockFactory) { this.positionCount = positionCount; @@ -41,16 +40,12 @@ public void allowPassingToDifferentDriver() { } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + protected void closeInternal() { blockFactory.adjustBreaker(-ramBytesUsed(), true); } @Override public final boolean isReleased() { - return released; + return hasReferences() == false; } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java index 964e510de9a20..4a24493de76c3 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Block.java @@ -159,16 +159,6 @@ default boolean mvSortedAscending() { */ Block expand(); - /** - * {@return a constant null block with the given number of positions, using the non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstantNullBlock} - */ - // Eventually, this should use the GLOBAL breaking instance - @Deprecated - static Block constantNullBlock(int positions) { - return constantNullBlock(positions, BlockFactory.getNonBreakingInstance()); - } - /** * {@return a constant null block with the given number of positions}. * @deprecated use {@link BlockFactory#newConstantNullBlock} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java index 092f66a7d4427..dccbb03c0e48e 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactory.java @@ -10,7 +10,6 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; -import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; @@ -25,35 +24,32 @@ public class BlockFactory { public static final String LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING = "esql.block_factory.local_breaker.max_over_reserved"; public static final ByteSizeValue LOCAL_BREAKER_OVER_RESERVED_DEFAULT_MAX_SIZE = ByteSizeValue.ofKb(16); - private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( - new NoopCircuitBreaker("noop-esql-breaker"), - BigArrays.NON_RECYCLING_INSTANCE - ); + public static final String MAX_BLOCK_PRIMITIVE_ARRAY_SIZE_SETTING = "esql.block_factory.max_block_primitive_array_size"; + public static final ByteSizeValue DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE = ByteSizeValue.ofKb(512); private final CircuitBreaker breaker; private final BigArrays bigArrays; + private final long maxPrimitiveArrayBytes; private final BlockFactory parent; public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { - this(breaker, bigArrays, null); + this(breaker, 
bigArrays, DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE); } - protected BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { + public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize) { + this(breaker, bigArrays, maxPrimitiveArraySize, null); + } + + public BlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize, BlockFactory parent) { this.breaker = breaker; this.bigArrays = bigArrays; this.parent = parent; - } - - /** - * Returns the Non-Breaking block factory. - */ - public static BlockFactory getNonBreakingInstance() { - return NON_BREAKING; + this.maxPrimitiveArrayBytes = maxPrimitiveArraySize.getBytes(); } public static BlockFactory getInstance(CircuitBreaker breaker, BigArrays bigArrays) { - return new BlockFactory(breaker, bigArrays); + return new BlockFactory(breaker, bigArrays, DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE, null); } // For testing @@ -74,7 +70,7 @@ public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { if (childBreaker.parentBreaker() != breaker) { throw new IllegalStateException("Different parent breaker"); } - return new BlockFactory(childBreaker, bigArrays, this); + return new BlockFactory(childBreaker, bigArrays, ByteSizeValue.ofBytes(maxPrimitiveArrayBytes), this); } /** @@ -391,4 +387,11 @@ public Block newConstantNullBlock(int positions) { adjustBreaker(b.ramBytesUsed(), true); return b; } + + /** + * Returns the maximum number of bytes that a Block should be backed by a primitive array before switching to using BigArrays. + */ + public long maxPrimitiveArrayBytes() { + return maxPrimitiveArrayBytes; + } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java index 405dd088bf3a5..058d623dce55d 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockUtils.java @@ -216,7 +216,7 @@ public static void appendValue(Block.Builder builder, Object val, ElementType ty public static Block constantBlock(BlockFactory blockFactory, Object val, int size) { if (val == null) { - return Block.constantNullBlock(size); + return blockFactory.newConstantNullBlock(size); } return constantBlock(blockFactory, fromJava(val.getClass()), val, size); } @@ -224,7 +224,7 @@ public static Block constantBlock(BlockFactory blockFactory, Object val, int siz // TODO: allow null values private static Block constantBlock(BlockFactory blockFactory, ElementType type, Object val, int size) { return switch (type) { - case NULL -> Block.constantNullBlock(size); + case NULL -> blockFactory.newConstantNullBlock(size); case LONG -> LongBlock.newConstantBlockWith((long) val, size, blockFactory); case INT -> IntBlock.newConstantBlockWith((int) val, size, blockFactory); case BYTES_REF -> BytesRefBlock.newConstantBlockWith(toBytesRef(val), size, blockFactory); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java index 29e39f43cddc2..639e1c298291f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ConstantNullBlock.java @@ -19,15 +19,10 @@ /** * Block 
implementation representing a constant null value. */ -public final class ConstantNullBlock extends AbstractBlock implements BooleanBlock, IntBlock, LongBlock, DoubleBlock, BytesRefBlock { +final class ConstantNullBlock extends AbstractBlock implements BooleanBlock, IntBlock, LongBlock, DoubleBlock, BytesRefBlock { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantNullBlock.class); - // Eventually, this should use the GLOBAL breaking instance - ConstantNullBlock(int positionCount) { - this(positionCount, BlockFactory.getNonBreakingInstance()); - } - ConstantNullBlock(int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); } @@ -83,8 +78,9 @@ public String getWriteableName() { return "ConstantNullBlock"; } - static ConstantNullBlock of(StreamInput in) throws IOException { - return new ConstantNullBlock(in.readVInt()); + static Block of(StreamInput in) throws IOException { + BlockFactory blockFactory = ((BlockStreamInput) in).blockFactory(); + return blockFactory.newConstantNullBlock(in.readVInt()); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java index d45314f5c8a78..f8e3428d6fee7 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocBlock.java @@ -70,11 +70,6 @@ public long ramBytesUsed() { return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(vector); } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; @@ -84,8 +79,8 @@ public void closeInternal() { /** * A builder for {@link DocBlock}.
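The DocVector#filter rewrite in the DocVector.java hunk below only hands the three freshly filtered sub-vectors to the new composite once all of them exist; if anything fails midway, it releases whatever was already built instead of leaking it. A toy sketch of that build-all-or-release pattern, using stand-in types (none of these names are the real API):

import java.util.concurrent.atomic.AtomicInteger;

final class LeakSafeDemo {
    // Counts live stand-in resources so the demo can prove nothing leaked.
    static final AtomicInteger live = new AtomicInteger();

    static final class Part implements AutoCloseable {
        Part() { live.incrementAndGet(); }
        @Override public void close() { live.decrementAndGet(); }
    }

    record Composite(Part a, Part b, Part c) {}

    static Composite buildFiltered(boolean failLast) {
        Part a = null, b = null, c = null;
        Composite result = null;
        try {
            a = new Part();
            b = new Part();
            if (failLast) throw new RuntimeException("allocation failed");
            c = new Part();
            result = new Composite(a, b, c); // composite now owns the parts
            return result;
        } finally {
            if (result == null) { // failure path: close whatever was built
                for (Part p : new Part[] { a, b, c }) {
                    if (p != null) p.close();
                }
            }
        }
    }

    public static void main(String[] args) {
        try { buildFiltered(true); } catch (RuntimeException e) { /* expected */ }
        System.out.println("live parts after failure: " + live.get()); // 0
    }
}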
*/ - public static Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return new Builder(estimatedSize, blockFactory); + public static Builder newBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + return new Builder(blockFactory, estimatedSize); } public static class Builder implements Block.Builder { @@ -93,10 +88,10 @@ public static class Builder implements Block.Builder { private final IntVector.Builder segments; private final IntVector.Builder docs; - private Builder(int estimatedSize, BlockFactory blockFactory) { - shards = IntVector.newVectorBuilder(estimatedSize, blockFactory); - segments = IntVector.newVectorBuilder(estimatedSize, blockFactory); - docs = IntVector.newVectorBuilder(estimatedSize, blockFactory); + private Builder(BlockFactory blockFactory, int estimatedSize) { + shards = blockFactory.newIntVectorBuilder(estimatedSize); + segments = blockFactory.newIntVectorBuilder(estimatedSize); + docs = blockFactory.newIntVectorBuilder(estimatedSize); } public Builder appendShard(int shard) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java index 3097dc73fb814..2461a402fe98a 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/DocVector.java @@ -46,8 +46,6 @@ public final class DocVector extends AbstractVector implements Vector { */ private int[] shardSegmentDocMapBackwards; - final DocBlock block; - public DocVector(IntVector shards, IntVector segments, IntVector docs, Boolean singleSegmentNonDecreasing) { super(shards.getPositionCount(), null); this.shards = shards; @@ -64,7 +62,6 @@ public DocVector(IntVector shards, IntVector segments, IntVector docs, Boolean s "invalid position count [" + shards.getPositionCount() + " != " + docs.getPositionCount() + "]" ); } - block = new DocBlock(this); } public IntVector shards() { @@ -171,12 +168,26 @@ protected void swap(int i, int j) { @Override public DocBlock asBlock() { - return block; + return new DocBlock(this); } @Override public DocVector filter(int... positions) { - return new DocVector(shards.filter(positions), segments.filter(positions), docs.filter(positions), null); + IntVector filteredShards = null; + IntVector filteredSegments = null; + IntVector filteredDocs = null; + DocVector result = null; + try { + filteredShards = shards.filter(positions); + filteredSegments = segments.filter(positions); + filteredDocs = docs.filter(positions); + result = new DocVector(filteredShards, filteredSegments, filteredDocs, null); + return result; + } finally { + if (result == null) { + Releasables.closeExpectNoException(filteredShards, filteredSegments, filteredDocs); + } + } } @Override @@ -231,8 +242,7 @@ public void allowPassingToDifferentDriver() { } @Override - public void close() { - released = true; + public void closeInternal() { Releasables.closeExpectNoException(shards.asBlock(), segments.asBlock(), docs.asBlock()); // Ugh! 
we always close blocks } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java index 324b6ee963596..2f7d65c8719e6 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/ElementType.java @@ -13,16 +13,16 @@ * The type of elements in {@link Block} and {@link Vector} */ public enum ElementType { - BOOLEAN(BooleanBlock::newBlockBuilder), - INT(IntBlock::newBlockBuilder), - LONG(LongBlock::newBlockBuilder), - DOUBLE(DoubleBlock::newBlockBuilder), + BOOLEAN(BlockFactory::newBooleanBlockBuilder), + INT(BlockFactory::newIntBlockBuilder), + LONG(BlockFactory::newLongBlockBuilder), + DOUBLE(BlockFactory::newDoubleBlockBuilder), /** * Blocks containing only null values. */ - NULL((estimatedSize, blockFactory) -> new ConstantNullBlock.Builder(blockFactory)), + NULL((blockFactory, estimatedSize) -> new ConstantNullBlock.Builder(blockFactory)), - BYTES_REF(BytesRefBlock::newBlockBuilder), + BYTES_REF(BlockFactory::newBytesRefBlockBuilder), /** * Blocks that reference individual lucene documents. @@ -32,10 +32,10 @@ public enum ElementType { /** * Intermediate blocks which don't support retrieving elements. */ - UNKNOWN((estimatedSize, blockFactory) -> { throw new UnsupportedOperationException("can't build null blocks"); }); + UNKNOWN((blockFactory, estimatedSize) -> { throw new UnsupportedOperationException("can't build null blocks"); }); - interface BuilderSupplier { - Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory); + private interface BuilderSupplier { + Block.Builder newBlockBuilder(BlockFactory blockFactory, int estimatedSize); } private final BuilderSupplier builder; @@ -44,20 +44,11 @@ interface BuilderSupplier { this.builder = builder; } - /** - * Create a new {@link Block.Builder} for blocks of this type. - * @deprecated use {@link #newBlockBuilder(int, BlockFactory)} - */ - @Deprecated - public Block.Builder newBlockBuilder(int estimatedSize) { - return builder.newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Create a new {@link Block.Builder} for blocks of this type. */ public Block.Builder newBlockBuilder(int estimatedSize, BlockFactory blockFactory) { - return builder.newBlockBuilder(estimatedSize, blockFactory); + return builder.newBlockBuilder(blockFactory, estimatedSize); } public static ElementType fromJava(Class type) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java index 0ca06498f7129..fc09f636ac700 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/Vector.java @@ -8,15 +8,16 @@ package org.elasticsearch.compute.data; import org.apache.lucene.util.Accountable; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.Releasable; /** * A dense Vector of single values. 
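Note the flipped parameter order in ElementType's BuilderSupplier above: with the BlockFactory first and the estimated size second, an unbound instance-method reference such as BlockFactory::newIntBlockBuilder satisfies the functional interface directly, because the first SAM parameter becomes the receiver. A self-contained toy of just that mechanic, with stand-in types (this demonstrates the Java method-reference rule, not the real classes):

final class SamDemo {
    interface BuilderSupplier {
        String newBuilder(Factory factory, int estimatedSize);
    }

    static final class Factory {
        // Instance method with shape (int) -> String.
        String newIntBuilder(int estimatedSize) {
            return "builder(" + estimatedSize + ")";
        }
    }

    public static void main(String[] args) {
        // Unbound reference: the SAM's first parameter supplies the receiver.
        BuilderSupplier s = Factory::newIntBuilder;
        System.out.println(s.newBuilder(new Factory(), 16)); // builder(16)
    }
}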
*/ -public interface Vector extends Accountable, Releasable { +public interface Vector extends Accountable, RefCounted, Releasable { /** - * {@return Returns a Block view over this vector.} + * {@return Returns a new Block containing this vector.} */ Block asBlock(); diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st index 03397e1a2e5ad..01a6d70d63795 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayBlock.java.st @@ -15,31 +15,25 @@ import org.elasticsearch.core.Releasables; $else$ import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.core.Releasables; -import java.util.Arrays; $endif$ import java.util.BitSet; /** - * Block implementation that stores an array of $type$. + * Block implementation that stores values in a {@link $Type$ArrayVector}. +$if(BytesRef)$ + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. +$endif$ * This class is generated. Do not edit it. */ -public final class $Type$ArrayBlock extends AbstractArrayBlock implements $Type$Block { +final class $Type$ArrayBlock extends AbstractArrayBlock implements $Type$Block { private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayBlock.class); -$if(BytesRef)$ - private final BytesRefArray values; - -$else$ - private final $type$[] values; -$endif$ + private final $Type$ArrayVector vector; - public $Type$ArrayBlock($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, int[] firstValueIndexes, BitSet nulls, MvOrdering mvOrdering) { - this(values, positionCount, firstValueIndexes, nulls, mvOrdering, BlockFactory.getNonBreakingInstance()); - } - - public $Type$ArrayBlock( + $Type$ArrayBlock( $if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, int[] firstValueIndexes, @@ -48,7 +42,11 @@ $endif$ BlockFactory blockFactory ) { super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); - this.values = values; + $if(BytesRef)$ + this.vector = new BytesRefArrayVector(values, (int) values.size(), blockFactory); + $else$ + this.vector = new $Type$ArrayVector(values, values.length, blockFactory); + $endif$ } @Override @@ -59,10 +57,10 @@ $endif$ @Override $if(BytesRef)$ public BytesRef getBytesRef(int valueIndex, BytesRef dest) { - return values.get(valueIndex, dest); + return vector.getBytesRef(valueIndex, dest); $else$ public $type$ get$Type$(int valueIndex) { - return values[valueIndex]; + return vector.get$Type$(valueIndex); $endif$ } @@ -104,7 +102,7 @@ $endif$ incRef(); return this; } - // TODO use reference counting to share the values + // TODO use reference counting to share the vector $if(BytesRef)$ final BytesRef scratch = new BytesRef(); $endif$ @@ -128,14 +126,13 @@ $endif$ } } - public static long ramBytesEstimated($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int[] firstValueIndexes, BitSet nullsMask) { - return BASE_RAM_BYTES_USED + RamUsageEstimator.sizeOf(values) + BlockRamUsageEstimator.sizeOf(firstValueIndexes) - + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); } 
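With the array block now wrapping a vector, memory accounting splits in two: the block answers to the circuit breaker only for its own overhead (ramBytesUsedOnlyBlock() above: the first-value indexes plus the nulls mask), while the wrapped vector owns the array bytes and returns them when it closes. A toy model of that ownership split, assuming a plain counter in place of a real CircuitBreaker (all names hypothetical):

import java.util.concurrent.atomic.AtomicLong;

final class OwnershipDemo {
    static final AtomicLong breaker = new AtomicLong(); // bytes currently reserved

    static final class VectorModel implements AutoCloseable {
        static final long ARRAY_BYTES = 4096;
        VectorModel() { breaker.addAndGet(ARRAY_BYTES); }
        long ramBytesUsed() { return ARRAY_BYTES; }
        @Override public void close() { breaker.addAndGet(-ARRAY_BYTES); }
    }

    static final class BlockModel implements AutoCloseable {
        static final long OVERHEAD = 64; // firstValueIndexes + nulls mask
        final VectorModel vector = new VectorModel();
        BlockModel() { breaker.addAndGet(OVERHEAD); }
        long ramBytesUsed() { return OVERHEAD + vector.ramBytesUsed(); }
        @Override public void close() {   // mirrors the new closeInternal()
            breaker.addAndGet(-OVERHEAD); // give back only the block's own share
            vector.close();               // the vector returns the array bytes
        }
    }

    public static void main(String[] args) {
        try (BlockModel block = new BlockModel()) {
            System.out.println(block.ramBytesUsed()); // 4160 = overhead + array
        }
        System.out.println(breaker.get()); // 0: everything handed back
    }
}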
@Override public long ramBytesUsed() { - return ramBytesEstimated(values, firstValueIndexes, nullsMask); + return ramBytesUsedOnlyBlock() + vector.ramBytesUsed(); } @Override @@ -158,23 +155,20 @@ $endif$ + getPositionCount() + ", mvOrdering=" + mvOrdering() -$if(BytesRef)$ - + ", values=" - + values.size() -$else$ - + ", values=" - + Arrays.toString(values) -$endif$ + + ", vector=" + + vector + ']'; } + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + @Override public void closeInternal() { - $if(BytesRef)$ - blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); - Releasables.closeExpectNoException(values); - $else$ - blockFactory().adjustBreaker(-ramBytesUsed(), true); - $endif$ + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st index 4dd903945d04f..2608816f91f19 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ArrayVector.java.st @@ -21,9 +21,12 @@ $endif$ /** * Vector implementation that stores an array of $type$ values. +$if(BytesRef)$ + * Does not take ownership of the given {@link BytesRefArray} and does not adjust circuit breakers to account for it. +$endif$ * This class is generated. Do not edit it. */ -public final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { +final class $Type$ArrayVector extends AbstractVector implements $Type$Vector { static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$ArrayVector.class); @@ -34,21 +37,14 @@ $else$ private final $type$[] values; $endif$ - private final $Type$Block block; - - public $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, BlockFactory blockFactory) { + $Type$ArrayVector($if(BytesRef)$BytesRefArray$else$$type$[]$endif$ values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new $Type$VectorBlock(this); } @Override public $Type$Block asBlock() { - return block; + return new $Type$VectorBlock(this); } $if(BytesRef)$ @@ -124,11 +120,9 @@ $endif$ $if(BytesRef)$ @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link BytesRefArray} is adjusted outside + // of this class. 
blockFactory().adjustBreaker(-ramBytesUsed() + values.bigArraysRamBytesUsed(), true); Releasables.closeExpectNoException(values); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st new file mode 100644 index 0000000000000..989f119bca062 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayBlock.java.st @@ -0,0 +1,145 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.util.$if(boolean)$Bit$else$$Type$$endif$Array; +import org.elasticsearch.core.Releasables; + +import java.util.BitSet; + +/** + * Block implementation that stores values in a {@link $Type$BigArrayVector}. Does not take ownership of the given + * {@link $if(boolean)$Bit$else$$Type$$endif$Array} and does not adjust circuit breakers to account for it. + * This class is generated. Do not edit it. + */ +public final class $Type$BigArrayBlock extends AbstractArrayBlock implements $Type$Block { + + private static final long BASE_RAM_BYTES_USED = 0; // TODO: fix this + private final $Type$BigArrayVector vector; + + public $Type$BigArrayBlock( + $if(boolean)$Bit$else$$Type$$endif$Array values, + int positionCount, + int[] firstValueIndexes, + BitSet nulls, + MvOrdering mvOrdering, + BlockFactory blockFactory + ) { + super(positionCount, firstValueIndexes, nulls, mvOrdering, blockFactory); + this.vector = new $Type$BigArrayVector(values, (int) values.size(), blockFactory); + } + + @Override + public $Type$Vector asVector() { + return null; + } + + @Override + public $type$ get$Type$(int valueIndex) { + return vector.get$Type$(valueIndex); + } + + @Override + public $Type$Block filter(int... 
positions) { + try (var builder = blockFactory().new$Type$BlockBuilder(positions.length)) { + for (int pos : positions) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int valueCount = getValueCount(pos); + int first = getFirstValueIndex(pos); + if (valueCount == 1) { + builder.append$Type$(get$Type$(getFirstValueIndex(pos)$if(BytesRef)$, scratch$endif$)); + } else { + builder.beginPositionEntry(); + for (int c = 0; c < valueCount; c++) { + builder.append$Type$(get$Type$(first + c$if(BytesRef)$, scratch$endif$)); + } + builder.endPositionEntry(); + } + } + return builder.mvOrdering(mvOrdering()).build(); + } + } + + @Override + public ElementType elementType() { + return ElementType.$TYPE$; + } + + @Override + public $Type$Block expand() { + if (firstValueIndexes == null) { + incRef(); + return this; + } + // TODO use reference counting to share the vector + try (var builder = blockFactory().new$Type$BlockBuilder(firstValueIndexes[getPositionCount()])) { + for (int pos = 0; pos < getPositionCount(); pos++) { + if (isNull(pos)) { + builder.appendNull(); + continue; + } + int first = getFirstValueIndex(pos); + int end = first + getValueCount(pos); + for (int i = first; i < end; i++) { + builder.append$Type$(get$Type$(i)); + } + } + return builder.mvOrdering(MvOrdering.DEDUPLICATED_AND_SORTED_ASCENDING).build(); + } + } + + private long ramBytesUsedOnlyBlock() { + return BASE_RAM_BYTES_USED + BlockRamUsageEstimator.sizeOf(firstValueIndexes) + BlockRamUsageEstimator.sizeOfBitSet(nullsMask); + } + + @Override + public long ramBytesUsed() { + return ramBytesUsedOnlyBlock() + RamUsageEstimator.sizeOf(vector); + } + + @Override + public boolean equals(Object obj) { + if (obj instanceof $Type$Block that) { + return $Type$Block.equals(this, that); + } + return false; + } + + @Override + public int hashCode() { + return $Type$Block.hash(this); + } + + @Override + public String toString() { + return getClass().getSimpleName() + + "[positions=" + + getPositionCount() + + ", mvOrdering=" + + mvOrdering() + + ", ramBytesUsed=" + + vector.ramBytesUsed() + + ']'; + } + + @Override + public void allowPassingToDifferentDriver() { + super.allowPassingToDifferentDriver(); + vector.allowPassingToDifferentDriver(); + } + + @Override + public void closeInternal() { + blockFactory().adjustBreaker(-ramBytesUsedOnlyBlock(), true); + Releasables.closeExpectNoException(vector); + } +} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st index 6a231d9ff6bf3..3664471b91e90 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BigArrayVector.java.st @@ -12,30 +12,24 @@ import org.elasticsearch.common.util.$if(boolean)$Bit$else$$Type$$endif$Array; import org.elasticsearch.core.Releasable; /** - * Vector implementation that defers to an enclosed $Type$Array. + * Vector implementation that defers to an enclosed {@link $if(boolean)$Bit$else$$Type$$endif$Array}. + * Does not take ownership of the array and does not adjust circuit breakers to account for it. * This class is generated. Do not edit it. 
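The new big-array block implements expand() by flattening every multi-valued position into a run of single-valued positions, with nulls passing through as single null positions. A minimal model of that flattening over plain arrays (purely illustrative; arrays stand in for blocks):

import java.util.ArrayList;
import java.util.List;

final class ExpandDemo {
    static List<Long> expand(long[][] positions) {
        List<Long> out = new ArrayList<>();
        for (long[] values : positions) {
            if (values == null) { // a null position stays a single null
                out.add(null);
                continue;
            }
            for (long v : values) { // each value becomes its own position
                out.add(v);
            }
        }
        return out;
    }

    public static void main(String[] args) {
        long[][] in = { { 1 }, null, { 2, 3, 4 } };
        System.out.println(expand(in)); // [1, null, 2, 3, 4]
    }
}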
*/ public final class $Type$BigArrayVector extends AbstractVector implements $Type$Vector, Releasable { - private static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance($Type$BigArrayVector.class); + private static final long BASE_RAM_BYTES_USED = 0; // FIXME private final $if(boolean)$Bit$else$$Type$$endif$Array values; - private final $Type$Block block; - - public $Type$BigArrayVector($if(boolean)$Bit$else$$Type$$endif$Array values, int positionCount) { - this(values, positionCount, BlockFactory.getNonBreakingInstance()); - } - public $Type$BigArrayVector($if(boolean)$Bit$else$$Type$$endif$Array values, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.values = values; - this.block = new $Type$VectorBlock(this); } @Override public $Type$Block asBlock() { - return block; + return new $Type$VectorBlock(this); } @Override @@ -78,11 +72,9 @@ public final class $Type$BigArrayVector extends AbstractVector implements $Type$ } @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; + public void closeInternal() { + // The circuit breaker that tracks the values {@link $if(boolean)$Bit$else$$Type$$endif$Array} is adjusted outside + // of this class. values.close(); } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st index 2ff537016459c..60cfd1cce7c24 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Block.java.st @@ -22,7 +22,7 @@ import java.io.IOException; * Block that stores $type$ values. * This class is generated. Do not edit it. */ -public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock, ConstantNullBlock { +public sealed interface $Type$Block extends Block permits $Type$ArrayBlock, $Type$VectorBlock, ConstantNullBlock$if(BytesRef)$$else$, $Type$BigArrayBlock$endif$ { $if(BytesRef)$ BytesRef NULL_VALUE = new BytesRef(); @@ -203,16 +203,6 @@ $endif$ return result; } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newBlockBuilder(int estimatedSize) { - return newBlockBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a builder. * @deprecated use {@link BlockFactory#new$Type$BlockBuilder} @@ -222,16 +212,6 @@ $endif$ return blockFactory.new$Type$BlockBuilder(estimatedSize); } - /** - * Returns a constant block built by the {@link BlockFactory#getNonBreakingInstance non-breaking block factory}. - * @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static $Type$Block newConstantBlockWith($type$ value, int positions) { - return newConstantBlockWith(value, positions, BlockFactory.getNonBreakingInstance()); - } - /** * Returns a constant block. 
* @deprecated use {@link BlockFactory#newConstant$Type$BlockWith} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st index 61527f166cfa9..63d16c09253e5 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-BlockBuilder.java.st @@ -17,6 +17,7 @@ import org.elasticsearch.core.Releasables; $else$ import org.apache.lucene.util.RamUsageEstimator; import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.util.$if(boolean)$Bit$else$$Type$$endif$Array; import java.util.Arrays; $endif$ @@ -246,60 +247,107 @@ $endif$ return this; } +$if(BytesRef)$ + private $Type$Block buildFromBytesArray() { + assert estimatedBytes == 0 || firstValueIndexes != null; + final $Type$Block theBlock; + if (hasNonNullValue && positionCount == 1 && valueCount == 1) { + theBlock = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes, false); + Releasables.closeExpectNoException(values); + } else { + if (isDense() && singleValued()) { + theBlock = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); + } else { + theBlock = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); + } + return theBlock; + } + +$else$ + private $Type$Block buildBigArraysBlock() { + final $Type$Block theBlock; + $if(boolean)$ + final BitArray array = new BitArray(valueCount, blockFactory.bigArrays()); + for (int i = 0; i < valueCount; i++) { + if (values[i]) { + array.set(i); + } + } + $else$ + final $Type$Array array = blockFactory.bigArrays().new$Type$Array(valueCount, false); + for (int i = 0; i < valueCount; i++) { + array.set(i, values[i]); + } + $endif$ + if (isDense() && singleValued()) { + theBlock = new $Type$BigArrayVector(array, positionCount, blockFactory).asBlock(); + } else { + theBlock = new $Type$BigArrayBlock(array, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); + } + /* + * Update the breaker with the actual bytes used. + * We pass false below even though we've used the bytes. That's weird, + * but if we break here we will throw away the used memory, letting + * it be deallocated. 
The exception will bubble up and the builder will + * still technically be open, meaning the calling code should close it + * which will return all used memory to the breaker. + */ + blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - array.ramBytesUsed(), false); + return theBlock; + } +$endif$ + @Override public $Type$Block build() { try { finish(); $Type$Block theBlock; $if(BytesRef)$ - assert estimatedBytes == 0 || firstValueIndexes != null; - if (hasNonNullValue && positionCount == 1 && valueCount == 1) { - theBlock = new ConstantBytesRefVector(BytesRef.deepCopyOf(values.get(0, new BytesRef())), 1, blockFactory).asBlock(); - /* - * Update the breaker with the actual bytes used. - * We pass false below even though we've used the bytes. That's weird, - * but if we break here we will throw away the used memory, letting - * it be deallocated. The exception will bubble up and the builder will - * still technically be open, meaning the calling code should close it - * which will return all used memory to the breaker. - */ - blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes, false); - Releasables.closeExpectNoException(values); - } else { - if (isDense() && singleValued()) { - theBlock = new $Type$ArrayVector(values, positionCount, blockFactory).asBlock(); - } else { - theBlock = new $Type$ArrayBlock(values, positionCount, firstValueIndexes, nullsMask, mvOrdering, blockFactory); - } - /* - * Update the breaker with the actual bytes used. - * We pass false below even though we've used the bytes. That's weird, - * but if we break here we will throw away the used memory, letting - * it be deallocated. The exception will bubble up and the builder will - * still technically be open, meaning the calling code should close it - * which will return all used memory to the breaker. 
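Those breaker adjustments back the decision build() makes in the hunk that follows: a single non-null value becomes a constant block, anything whose estimated size exceeds BlockFactory#maxPrimitiveArrayBytes() (512 KB by default, per DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE) is copied into a BigArrays-backed block, and everything else stays on a plain primitive array. A compact sketch of just that selection, with the real machinery reduced to numbers (the hasNonNullValue check is elided for brevity):

final class RepresentationChoice {
    static final long DEFAULT_MAX = 512 * 1024; // mirrors DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE

    enum Kind { CONSTANT, PRIMITIVE_ARRAY, BIG_ARRAY }

    static Kind choose(int positionCount, int valueCount, long estimatedBytes) {
        if (positionCount == 1 && valueCount == 1) {
            return Kind.CONSTANT;        // one value: constant representation
        }
        if (estimatedBytes > DEFAULT_MAX) {
            return Kind.BIG_ARRAY;       // large: breaker-aware BigArrays pages
        }
        return Kind.PRIMITIVE_ARRAY;     // small: plain heap array
    }

    public static void main(String[] args) {
        System.out.println(choose(1, 1, 8));                         // CONSTANT
        System.out.println(choose(1_000, 1_000, 8_000));             // PRIMITIVE_ARRAY
        System.out.println(choose(1_000_000, 1_000_000, 8_000_000)); // BIG_ARRAY
    }
}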
- */ - blockFactory.adjustBreaker(theBlock.ramBytesUsed() - estimatedBytes - values.bigArraysRamBytesUsed(), false); - } + theBlock = buildFromBytesArray(); values = null; $else$ if (hasNonNullValue && positionCount == 1 && valueCount == 1) { theBlock = blockFactory.newConstant$Type$BlockWith(values[0], 1, estimatedBytes); } else { - if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { - values = Arrays.copyOf(values, valueCount); - } - if (isDense() && singleValued()) { - theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); + if (estimatedBytes > blockFactory.maxPrimitiveArrayBytes()) { + theBlock = buildBigArraysBlock(); } else { - theBlock = blockFactory.new$Type$ArrayBlock( - values, - positionCount, - firstValueIndexes, - nullsMask, - mvOrdering, - estimatedBytes - ); + if (values.length - valueCount > 1024 || valueCount < (values.length / 2)) { + adjustBreaker(valueCount * elementSize()); + values = Arrays.copyOf(values, valueCount); + adjustBreaker(-values.length * elementSize()); + } + if (isDense() && singleValued()) { + theBlock = blockFactory.new$Type$ArrayVector(values, positionCount, estimatedBytes).asBlock(); + } else { + theBlock = blockFactory.new$Type$ArrayBlock( + values, + positionCount, + firstValueIndexes, + nullsMask, + mvOrdering, + estimatedBytes + ); + } } } $endif$ diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st index f685d38d6459b..625f014a20ffc 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-ConstantVector.java.st @@ -16,7 +16,7 @@ import org.apache.lucene.util.RamUsageEstimator; * Vector implementation that stores a constant $type$ value. * This class is generated. Do not edit it. */ -public final class Constant$Type$Vector extends AbstractVector implements $Type$Vector { +final class Constant$Type$Vector extends AbstractVector implements $Type$Vector { $if(BytesRef)$ static final long BASE_RAM_BYTES_USED = RamUsageEstimator.shallowSizeOfInstance(ConstantBytesRefVector.class) + RamUsageEstimator @@ -27,16 +27,9 @@ $endif$ private final $type$ value; - private final $Type$Block block; - - public Constant$Type$Vector($type$ value, int positionCount) { - this(value, positionCount, BlockFactory.getNonBreakingInstance()); - } - - public Constant$Type$Vector($type$ value, int positionCount, BlockFactory blockFactory) { + Constant$Type$Vector($type$ value, int positionCount, BlockFactory blockFactory) { super(positionCount, blockFactory); this.value = value; - this.block = new $Type$VectorBlock(this); } @Override @@ -50,12 +43,12 @@ $endif$ @Override public $Type$Block asBlock() { - return block; + return new $Type$VectorBlock(this); } @Override public $Type$Vector filter(int... 
positions) { - return new Constant$Type$Vector(value, positions.length); + return blockFactory().newConstant$Type$Vector(value, positions.length); } @Override @@ -101,13 +94,4 @@ $endif$ public String toString() { return getClass().getSimpleName() + "[positions=" + getPositionCount() + ", value=" + value + ']'; } - - @Override - public void close() { - if (released) { - throw new IllegalStateException("can't release already released vector [" + this + "]"); - } - released = true; - blockFactory().adjustBreaker(-ramBytesUsed(), true); - } } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st index 6ec41ccdc6ab9..c303a8391ad18 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-Vector.java.st @@ -143,46 +143,6 @@ $endif$ } } - /** - * Returns a builder using the {@link BlockFactory#getNonBreakingInstance nonbreaking block factory}. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ - // Eventually, we want to remove this entirely, always passing an explicit BlockFactory - @Deprecated - static Builder newVectorBuilder(int estimatedSize) { - return newVectorBuilder(estimatedSize, BlockFactory.getNonBreakingInstance()); - } - -$if(BytesRef)$ - /** - * Creates a builder that grows as needed. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ -$else$ - /** - * Creates a builder that grows as needed. Prefer {@link #newVectorFixedBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#new$Type$VectorBuilder} - */ -$endif$ - @Deprecated - static Builder newVectorBuilder(int estimatedSize, BlockFactory blockFactory) { - return blockFactory.new$Type$VectorBuilder(estimatedSize); - } - -$if(BytesRef)$ -$else$ - /** - * Creates a builder that never grows. Prefer this over {@link #newVectorBuilder} - * if you know the size up front because it's faster. - * @deprecated use {@link BlockFactory#new$Type$VectorFixedBuilder} - */ - @Deprecated - static FixedBuilder newVectorFixedBuilder(int size, BlockFactory blockFactory) { - return blockFactory.new$Type$VectorFixedBuilder(size); - } -$endif$ - $if(int)$ /** Create a vector for a range of ints. */ static IntVector range(int startInclusive, int endExclusive, BlockFactory blockFactory) { @@ -197,7 +157,11 @@ $endif$ /** * A builder that grows as needed. */ +$if(BytesRef)$ sealed interface Builder extends Vector.Builder permits $Type$VectorBuilder { +$else$ + sealed interface Builder extends Vector.Builder permits $Type$VectorBuilder, FixedBuilder { +$endif$ /** * Appends a $type$ to the current entry. */ @@ -212,14 +176,12 @@ $else$ /** * A builder that never grows. */ - sealed interface FixedBuilder extends Vector.Builder permits $Type$VectorFixedBuilder { + sealed interface FixedBuilder extends Builder permits $Type$VectorFixedBuilder { /** * Appends a $type$ to the current entry. 
*/ - FixedBuilder append$Type$($type$ value); - @Override - $Type$Vector build(); + FixedBuilder append$Type$($type$ value); } $endif$ } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st index 8772e633ff14b..4bc3c66b65743 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/X-VectorBlock.java.st @@ -13,7 +13,7 @@ $endif$ import org.elasticsearch.core.Releasables; /** - * Block view of a $Type$Vector. + * Block view of a {@link $Type$Vector}. Cannot represent multi-values or nulls. * This class is generated. Do not edit it. */ public final class $Type$VectorBlock extends AbstractVectorBlock implements $Type$Block { @@ -81,11 +81,6 @@ $endif$ return getClass().getSimpleName() + "[vector=" + vector + "]"; } - @Override - public boolean isReleased() { - return super.isReleased() || vector.isReleased(); - } - @Override public void closeInternal() { assert (vector.isReleased() == false) : "can't release block [" + this + "] containing already released vector"; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdFieldIndexFieldData.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdFieldIndexFieldData.java deleted file mode 100644 index d91c758ab3bd9..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/IdFieldIndexFieldData.java +++ /dev/null @@ -1,129 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
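
The sharpened javadoc above captures the key property of $Type$VectorBlock: it is a null-free, single-valued view over a vector, and, per the ConstantVector change earlier in this diff, asBlock() now builds a fresh throwaway view instead of caching one. A rough illustration with invented names (SimpleVector, SimpleBlockView), not the ESQL classes themselves:

public class VectorViewSketch {
    /** Dense storage: exactly one int per position, no nulls. */
    static final class SimpleVector {
        private final int[] values;
        SimpleVector(int[] values) { this.values = values; }
        int get(int position) { return values[position]; }
        /** Each call wraps the same storage in a fresh, cheap view. */
        SimpleBlockView asBlock() { return new SimpleBlockView(this); }
    }

    /** The "block" face of a vector: trivially single-valued and null-free. */
    record SimpleBlockView(SimpleVector vector) {
        int getValueCount(int position) { return 1; }
        boolean isNull(int position) { return false; }
        int get(int position) { return vector.get(position); }
    }

    public static void main(String[] args) {
        SimpleVector vector = new SimpleVector(new int[] { 7, 8, 9 });
        System.out.println(vector.asBlock().get(1)); // 8
        // Two views over one vector are equal because they share the same storage.
        System.out.println(vector.asBlock().equals(vector.asBlock())); // true
    }
}
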
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.LeafReaderContext; -import org.apache.lucene.search.SortField; -import org.apache.lucene.util.BytesRef; -import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.LeafFieldData; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.index.fieldvisitor.LeafStoredFieldLoader; -import org.elasticsearch.index.fieldvisitor.StoredFieldLoader; -import org.elasticsearch.index.mapper.IdFieldMapper; -import org.elasticsearch.script.field.DocValuesScriptFieldFactory; -import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.search.MultiValueMode; -import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import org.elasticsearch.search.sort.BucketedSort; -import org.elasticsearch.search.sort.SortOrder; - -import java.io.IOException; -import java.util.Set; - -public class IdFieldIndexFieldData implements IndexFieldData { - - private static final String FIELD_NAME = IdFieldMapper.NAME; - private final ValuesSourceType valuesSourceType; - private final StoredFieldLoader loader; - - protected IdFieldIndexFieldData(ValuesSourceType valuesSourceType) { - this.valuesSourceType = valuesSourceType; - this.loader = StoredFieldLoader.create(false, Set.of(FIELD_NAME)); - } - - @Override - public String getFieldName() { - return FIELD_NAME; - } - - @Override - public ValuesSourceType getValuesSourceType() { - return valuesSourceType; - } - - @Override - public final IdFieldLeafFieldData load(LeafReaderContext context) { - try { - return loadDirect(context); - } catch (Exception e) { - throw ExceptionsHelper.convertToElastic(e); - } - } - - @Override - public final IdFieldLeafFieldData loadDirect(LeafReaderContext context) throws Exception { - return new IdFieldLeafFieldData(loader.getLoader(context, null)); - } - - @Override - public SortField sortField(Object missingValue, MultiValueMode sortMode, XFieldComparatorSource.Nested nested, boolean reverse) { - throw new IllegalArgumentException("not supported for stored field fallback"); - } - - @Override - public BucketedSort newBucketedSort( - BigArrays bigArrays, - Object missingValue, - MultiValueMode sortMode, - XFieldComparatorSource.Nested nested, - SortOrder sortOrder, - DocValueFormat format, - int bucketSize, - BucketedSort.ExtraData extra - ) { - throw new IllegalArgumentException("not supported for stored field fallback"); - } - - class IdFieldLeafFieldData implements LeafFieldData { - private final LeafStoredFieldLoader loader; - - protected IdFieldLeafFieldData(LeafStoredFieldLoader loader) { - this.loader = loader; - } - - @Override - public DocValuesScriptFieldFactory getScriptFieldFactory(String name) { - throw new IllegalArgumentException("not supported for _id field"); - } - - @Override - public long ramBytesUsed() { - return 0L; - } - - @Override - public void close() {} - - @Override - public SortedBinaryDocValues getBytesValues() { - return new SortedBinaryDocValues() { - private String id; - - @Override - public boolean advanceExact(int doc) throws IOException { - loader.advanceTo(doc); - id = loader.id(); - return id != null; - } - - @Override - public int docValueCount() { - return 1; - } - - @Override - public BytesRef nextValue() throws IOException { - return new BytesRef(id); - } - }; - } - } -} diff --git 
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java index 6536b08cd2419..21b2a4cfaeb0b 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneOperator.java @@ -16,6 +16,7 @@ import org.apache.lucene.search.ScoreMode; import org.apache.lucene.search.Weight; import org.apache.lucene.util.Bits; +import org.elasticsearch.TransportVersions; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; @@ -30,8 +31,13 @@ import java.io.IOException; import java.io.UncheckedIOException; +import java.util.Collections; +import java.util.HashSet; import java.util.Objects; +import java.util.Set; +import java.util.TreeSet; import java.util.function.Function; +import java.util.stream.Collectors; public abstract class LuceneOperator extends SourceOperator { private static final Logger logger = LogManager.getLogger(LuceneOperator.class); @@ -40,10 +46,16 @@ public abstract class LuceneOperator extends SourceOperator { protected final BlockFactory blockFactory; - private int processSlices; + /** + * Count of the number of slices processed. + */ + private int processedSlices; final int maxPageSize; private final LuceneSliceQueue sliceQueue; + private final Set<Query> processedQueries = new HashSet<>(); + private final Set<String> processedShards = new HashSet<>(); + private LuceneSlice currentSlice; private int sliceIndex; @@ -52,7 +64,7 @@ public abstract class LuceneOperator extends SourceOperator { int pagesEmitted; boolean doneCollecting; - public LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { + protected LuceneOperator(BlockFactory blockFactory, int maxPageSize, LuceneSliceQueue sliceQueue) { this.blockFactory = blockFactory; this.maxPageSize = maxPageSize; this.sliceQueue = sliceQueue; @@ -73,18 +85,23 @@ LuceneScorer getCurrentOrLoadNextScorer() { if (currentSlice == null) { doneCollecting = true; return null; - } else { - processSlices++; } if (currentSlice.numLeaves() == 0) { continue; } + processedSlices++; + processedShards.add( + currentSlice.searchContext().getSearchExecutionContext().getFullyQualifiedIndex().getName() + + ":" + + currentSlice.searchContext().getSearchExecutionContext().getShardId() + ); } final PartialLeafReaderContext partialLeaf = currentSlice.getLeaf(sliceIndex++); logger.trace("Starting {}", partialLeaf); final LeafReaderContext leaf = partialLeaf.leafReaderContext(); if (currentScorer == null || currentScorer.leafReaderContext() != leaf) { final Weight weight = currentSlice.weight().get(); + processedQueries.add(weight.getQuery()); currentScorer = new LuceneScorer(currentSlice.shardIndex(), currentSlice.searchContext(), weight, leaf); } assert currentScorer.maxPosition <= partialLeaf.maxDoc() : currentScorer.maxPosition + ">" + partialLeaf.maxDoc(); @@ -190,6 +207,8 @@ public static class Status implements Operator.Status { ); private final int processedSlices; + private final Set<String> processedQueries; + private final Set<String> processedShards; private final int totalSlices; private final int pagesEmitted; private final int sliceIndex; @@ -198,7 +217,9 @@ public static class Status implements Operator.Status { private final int current; private Status(LuceneOperator operator) { -
processedSlices = operator.processSlices; + processedSlices = operator.processedSlices; + processedQueries = operator.processedQueries.stream().map(Query::toString).collect(Collectors.toCollection(TreeSet::new)); + processedShards = new TreeSet<>(operator.processedShards); sliceIndex = operator.sliceIndex; totalSlices = operator.sliceQueue.totalSlices(); LuceneSlice slice = operator.currentSlice; @@ -219,8 +240,20 @@ private Status(LuceneOperator operator) { pagesEmitted = operator.pagesEmitted; } - Status(int processedSlices, int sliceIndex, int totalSlices, int pagesEmitted, int sliceMin, int sliceMax, int current) { + Status( + int processedSlices, + Set<String> processedQueries, + Set<String> processedShards, + int sliceIndex, + int totalSlices, + int pagesEmitted, + int sliceMin, + int sliceMax, + int current + ) { this.processedSlices = processedSlices; + this.processedQueries = processedQueries; + this.processedShards = processedShards; this.sliceIndex = sliceIndex; this.totalSlices = totalSlices; this.pagesEmitted = pagesEmitted; @@ -231,6 +264,13 @@ private Status(LuceneOperator operator) { Status(StreamInput in) throws IOException { processedSlices = in.readVInt(); + if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + processedQueries = in.readCollectionAsSet(StreamInput::readString); + processedShards = in.readCollectionAsSet(StreamInput::readString); + } else { + processedQueries = Collections.emptySet(); + processedShards = Collections.emptySet(); + } sliceIndex = in.readVInt(); totalSlices = in.readVInt(); pagesEmitted = in.readVInt(); @@ -242,6 +282,10 @@ private Status(LuceneOperator operator) { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(processedSlices); + if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_STATUS_INCLUDE_LUCENE_QUERIES)) { + out.writeCollection(processedQueries, StreamOutput::writeString); + out.writeCollection(processedShards, StreamOutput::writeString); + } out.writeVInt(sliceIndex); out.writeVInt(totalSlices); out.writeVInt(pagesEmitted); @@ -259,6 +303,14 @@ private Status(LuceneOperator operator) { public int processedSlices() { return processedSlices; } + public Set<String> processedQueries() { + return processedQueries; + } + + public Set<String> processedShards() { + return processedShards; + } + public int sliceIndex() { return sliceIndex; } @@ -287,6 +339,8 @@ public int current() { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field("processed_slices", processedSlices); + builder.field("processed_queries", processedQueries); + builder.field("processed_shards", processedShards); builder.field("slice_index", sliceIndex); builder.field("total_slices", totalSlices); builder.field("pages_emitted", pagesEmitted); @@ -302,6 +356,8 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Status status = (Status) o; return processedSlices == status.processedSlices + && processedQueries.equals(status.processedQueries) + && processedShards.equals(status.processedShards) && sliceIndex == status.sliceIndex && totalSlices == status.totalSlices && pagesEmitted == status.pagesEmitted diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java index 7b2b276a619c6..0b92b88e98b70 100644 ---
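
The Status read and write paths above follow the standard transport-version gate: the new processed_queries and processed_shards sets only go on the wire when both endpoints are on or after ESQL_STATUS_INCLUDE_LUCENE_QUERIES, and readers of the old format fall back to empty sets. A self-contained sketch of the same pattern over plain data streams (the version constants and Status record are toy stand-ins, not the real TransportVersion machinery):

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Set;
import java.util.TreeSet;

public class VersionGatedStatusSketch {
    static final int V_BASE = 1;
    static final int V_WITH_QUERIES = 2; // analogous to ESQL_STATUS_INCLUDE_LUCENE_QUERIES

    record Status(int slices, Set<String> queries) {}

    static void write(DataOutputStream out, int wireVersion, int slices, Set<String> queries) throws IOException {
        out.writeInt(slices);
        if (wireVersion >= V_WITH_QUERIES) { // newer fields ride only on newer wires
            out.writeInt(queries.size());
            for (String q : queries) out.writeUTF(q);
        }
    }

    static Status read(DataInputStream in, int wireVersion) throws IOException {
        int slices = in.readInt();
        Set<String> queries = new TreeSet<>();
        if (wireVersion >= V_WITH_QUERIES) {
            int n = in.readInt();
            for (int i = 0; i < n; i++) queries.add(in.readUTF());
        } // an older peer simply reports an empty set
        return new Status(slices, queries);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        write(new DataOutputStream(bytes), V_BASE, 3, Set.of("*:*"));
        Status s = read(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())), V_BASE);
        System.out.println(s); // Status[slices=3, queries=[]]
    }
}
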
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneSourceOperator.java @@ -95,7 +95,7 @@ public LuceneSourceOperator(BlockFactory blockFactory, int maxPageSize, LuceneSl super(blockFactory, maxPageSize, sliceQueue); this.minPageSize = Math.max(1, maxPageSize / 2); this.remainingDocs = limit; - this.docsBuilder = IntVector.newVectorBuilder(Math.min(limit, maxPageSize), blockFactory); + this.docsBuilder = blockFactory.newIntVectorBuilder(Math.min(limit, maxPageSize)); this.leafCollector = new LeafCollector() { @Override public void setScorer(Scorable scorer) { @@ -152,7 +152,7 @@ public Page getOutput() { shard = IntBlock.newConstantBlockWith(scorer.shardIndex(), currentPagePos, blockFactory); leaf = IntBlock.newConstantBlockWith(scorer.leafReaderContext().ord, currentPagePos, blockFactory); docs = docsBuilder.build(); - docsBuilder = IntVector.newVectorBuilder(Math.min(remainingDocs, maxPageSize), blockFactory); + docsBuilder = blockFactory.newIntVectorBuilder(Math.min(remainingDocs, maxPageSize)); page = new Page(currentPagePos, new DocVector(shard.asVector(), leaf.asVector(), docs, true).asBlock()); } finally { if (page == null) { diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java index 9624fa48ef20d..bdaaa6c896009 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperator.java @@ -200,8 +200,8 @@ private Page emit(boolean startEmitting) { IntVector docs = null; Page page = null; try ( - IntVector.Builder currentSegmentBuilder = IntVector.newVectorBuilder(size, blockFactory); - IntVector.Builder currentDocsBuilder = IntVector.newVectorBuilder(size, blockFactory) + IntVector.Builder currentSegmentBuilder = blockFactory.newIntVectorFixedBuilder(size); + IntVector.Builder currentDocsBuilder = blockFactory.newIntVectorFixedBuilder(size) ) { int start = offset; offset += size; diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java deleted file mode 100644 index 04dbcd91c18c8..0000000000000 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/lucene/TextValueSource.java +++ /dev/null @@ -1,42 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ - -package org.elasticsearch.compute.lucene; - -import org.apache.lucene.index.LeafReaderContext; -import org.elasticsearch.index.fielddata.IndexFieldData; -import org.elasticsearch.index.fielddata.LeafFieldData; -import org.elasticsearch.index.fielddata.SortedBinaryDocValues; -import org.elasticsearch.script.field.TextDocValuesField; -import org.elasticsearch.search.aggregations.support.ValuesSource; - -public class TextValueSource extends ValuesSource.Bytes { - - private final IndexFieldData indexFieldData; - - public TextValueSource(IndexFieldData indexFieldData) { - this.indexFieldData = indexFieldData; - } - - @Override - public SortedBinaryDocValues bytesValues(LeafReaderContext leafReaderContext) { - String fieldName = indexFieldData.getFieldName(); - LeafFieldData fieldData = indexFieldData.load(leafReaderContext); - return ((TextDocValuesFieldWrapper) fieldData.getScriptFieldFactory(fieldName)).bytesValues(); - } - - /** Wrapper around TextDocValuesField that provides access to the SortedBinaryDocValues. */ - static final class TextDocValuesFieldWrapper extends TextDocValuesField { - TextDocValuesFieldWrapper(SortedBinaryDocValues input, String name) { - super(input, name); - } - - SortedBinaryDocValues bytesValues() { - return input; - } - } -} diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java index 1293118680824..38d879f8f7ad4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/DriverTaskRunner.java @@ -11,9 +11,9 @@ import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; +import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.compute.OwningChannelActionListener; import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; import org.elasticsearch.tasks.TaskId; @@ -117,7 +117,7 @@ public Status getStatus() { private record DriverRequestHandler(TransportService transportService) implements TransportRequestHandler { @Override public void messageReceived(DriverRequest request, TransportChannel channel, Task task) { - var listener = new OwningChannelActionListener(channel); + var listener = new ChannelActionListener(channel); Driver.start( transportService.getThreadPool().getThreadContext(), request.executor, diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java index 2a6a3c9b6210b..10f23ed29094f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/EvalOperator.java @@ -75,27 +75,22 @@ interface Factory { public static final ExpressionEvaluator.Factory CONSTANT_NULL_FACTORY = new ExpressionEvaluator.Factory() { @Override public ExpressionEvaluator get(DriverContext driverContext) { - return CONSTANT_NULL; - } + return new ExpressionEvaluator() { + @Override + public Block eval(Page page) { + return 
driverContext.blockFactory().newConstantNullBlock(page.getPositionCount()); + } - @Override - public String toString() { - return CONSTANT_NULL.toString(); - } - }; + @Override + public void close() { - public static final ExpressionEvaluator CONSTANT_NULL = new ExpressionEvaluator() { - @Override - public Block eval(Page page) { - return Block.constantNullBlock(page.getPositionCount()); + } + }; } @Override public String toString() { return "ConstantNull"; } - - @Override - public void close() {} }; } diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java index 4fb90ddb57e25..a895525add46f 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/OrdinalsGroupingOperator.java @@ -475,7 +475,7 @@ private static class ValuesAggregator implements Releasable { DriverContext driverContext ) { this.extractor = new ValuesSourceReaderOperator( - BlockFactory.getNonBreakingInstance(), + driverContext.blockFactory(), List.of(new ValuesSourceReaderOperator.FieldInfo(groupingField, blockLoaders)), shardContexts, docChannel diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java index ff124021ea3ad..4b4379eb6a4d8 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/RowOperator.java @@ -22,8 +22,7 @@ public record RowOperatorFactory(List objects) implements SourceOperator @Override public SourceOperator get(DriverContext driverContext) { - // We aren't yet ready to use the read block factory - return new RowOperator(BlockFactory.getNonBreakingInstance(), objects); + return new RowOperator(driverContext.blockFactory(), objects); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java index 313ec0b682602..c1029db4c32e4 100644 --- a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/ThrowingDriverContext.java @@ -7,6 +7,7 @@ package org.elasticsearch.compute.operator; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.ByteArray; import org.elasticsearch.common.util.DoubleArray; @@ -18,7 +19,7 @@ public class ThrowingDriverContext extends DriverContext { public ThrowingDriverContext() { - super(new ThrowingBigArrays(), BlockFactory.getNonBreakingInstance()); + super(new ThrowingBigArrays(), BlockFactory.getInstance(new NoopCircuitBreaker("throwing-context"), new ThrowingBigArrays())); } @Override diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java index 8fb38ccf907d6..245121028d80f 100644 --- 
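
Several operators above (EvalOperator's null evaluator, OrdinalsGroupingOperator, RowOperator, ThrowingDriverContext) stop reaching for the shared non-breaking BlockFactory and instead take the factory from their DriverContext, so every allocation is charged to that driver's circuit breaker and refunded when the block is closed. A rough sketch of that lifecycle, with invented names (Breaker, TrackingFactory, TrackedBlock), not the real compute classes:

public class FactoryLifecycleSketch {
    static final class Breaker {
        private long used;
        void add(long bytes) { used += bytes; }
        long used() { return used; }
    }

    static final class TrackingFactory {
        final Breaker breaker;
        TrackingFactory(Breaker breaker) { this.breaker = breaker; }
        TrackedBlock newIntBlock(int[] values) {
            long bytes = (long) values.length * Integer.BYTES;
            breaker.add(bytes); // charge the driver's breaker on allocation
            return new TrackedBlock(this, bytes, values);
        }
    }

    record TrackedBlock(TrackingFactory factory, long bytes, int[] values) implements AutoCloseable {
        @Override
        public void close() { factory.breaker.add(-bytes); } // refund on release
    }

    public static void main(String[] args) {
        Breaker breaker = new Breaker();
        TrackingFactory factory = new TrackingFactory(breaker);
        try (TrackedBlock b = factory.newIntBlock(new int[] { 1, 2, 3 })) {
            System.out.println(breaker.used()); // 12
        }
        System.out.println(breaker.used()); // 0: everything returned to the breaker
    }
}
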
a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/operator/exchange/ExchangeService.java @@ -13,6 +13,7 @@ import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; +import org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.component.AbstractLifecycleComponent; import org.elasticsearch.common.component.Lifecycle; @@ -21,13 +22,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractAsyncTask; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; -import org.elasticsearch.compute.OwningChannelActionListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; import org.elasticsearch.core.TimeValue; -import org.elasticsearch.tasks.CancellableTask; import org.elasticsearch.tasks.Task; -import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportChannel; import org.elasticsearch.transport.TransportRequest; @@ -43,7 +41,7 @@ /** * {@link ExchangeService} is responsible for exchanging pages between exchange sinks and sources on the same or different nodes. - * It holds a map of {@link ExchangeSourceHandler} and {@link ExchangeSinkHandler} instances for each node in the cluster. + * It holds a map of {@link ExchangeSinkHandler} instances for each node in the cluster to serve {@link ExchangeRequest}s * To connect exchange sources to exchange sinks, use the {@link ExchangeSourceHandler#addRemoteSink(RemoteSink, int)} method. */ public final class ExchangeService extends AbstractLifecycleComponent { @@ -66,7 +64,6 @@ public final class ExchangeService extends AbstractLifecycleComponent { private final BlockFactory blockFactory; private final Map sinks = ConcurrentCollections.newConcurrentMap(); - private final Map sources = ConcurrentCollections.newConcurrentMap(); private final InactiveSinksReaper inactiveSinksReaper; @@ -125,20 +122,6 @@ public void finishSinkHandler(String exchangeId, Exception failure) { } } - /** - * Creates an {@link ExchangeSourceHandler} for the specified exchange id. - * - * @throws IllegalStateException if a source handler for the given id already exists - */ - public ExchangeSourceHandler createSourceHandler(String exchangeId, int maxBufferSize, String fetchExecutor) { - ExchangeSourceHandler sourceHandler = new ExchangeSourceHandler(maxBufferSize, threadPool.executor(fetchExecutor)); - if (sources.putIfAbsent(exchangeId, sourceHandler) != null) { - throw new IllegalStateException("source exchanger for id [" + exchangeId + "] already exists"); - } - sourceHandler.addCompletionListener(ActionListener.releasing(() -> sources.remove(exchangeId))); - return sourceHandler; - } - /** * Opens a remote sink handler on the remote node for the given session ID. 
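
With the sources map and createSourceHandler removed above, ExchangeService keeps only sink handlers keyed by exchange id, and (as the handler below shows) a fetch against an unknown or already-removed sink is answered with an empty, finished ExchangeResponse rather than an error. A toy registry showing just that lookup contract; Registry and Response are invented names:

import java.util.ArrayDeque;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Queue;
import java.util.concurrent.ConcurrentHashMap;

public class SinkRegistrySketch {
    record Response(Optional<String> page, boolean finished) {}

    static final class Registry {
        private final Map<String, Queue<String>> sinks = new ConcurrentHashMap<>();

        void register(String exchangeId, Queue<String> pages) { sinks.put(exchangeId, pages); }

        /** An unknown id is answered with "finished", never an error. */
        Response fetch(String exchangeId) {
            Queue<String> pages = sinks.get(exchangeId);
            if (pages == null) {
                return new Response(Optional.empty(), true);
            }
            String page = pages.poll();
            return new Response(Optional.ofNullable(page), page == null);
        }
    }

    public static void main(String[] args) {
        Registry registry = new Registry();
        registry.register("session-1", new ArrayDeque<>(List.of("page-0")));
        System.out.println(registry.fetch("session-1")); // page-0, finished=false
        System.out.println(registry.fetch("session-1")); // empty, finished=true
        System.out.println(registry.fetch("gone"));      // empty, finished=true
    }
}
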
*/ @@ -193,15 +176,11 @@ private class ExchangeTransportAction implements TransportRequestHandler listener = new OwningChannelActionListener<>(channel); + ActionListener listener = new ChannelActionListener<>(channel); final ExchangeSinkHandler sinkHandler = sinks.get(exchangeId); if (sinkHandler == null) { listener.onResponse(new ExchangeResponse(null, true)); } else { - // the data-node request hasn't arrived yet; use the task framework to cancel the request if needed. - if (sinkHandler.hasData() == false) { - ((CancellableTask) task).addListener(() -> sinkHandler.onFailure(new TaskCancelledException("task cancelled"))); - } sinkHandler.fetchPageAsync(request.sourcesFinished(), listener); } } @@ -285,7 +264,7 @@ public void fetchPageAsync(boolean allSourcesFinished, ActionListener NULL_VALUE).forEach(blockBuilder::appendBytesRef); - assertSingleValueDenseBlock(blockBuilder.build()); + BytesRefBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } public void testSmallSingleValueDenseGrowthBoolean() { for (int initialSize : List.of(0, 1, 2, 3, 4, 5)) { - try (var blockBuilder = BooleanBlock.newBlockBuilder(initialSize)) { + try (var blockBuilder = blockFactory.newBooleanBlockBuilder(initialSize)) { IntStream.range(0, 10).forEach(i -> blockBuilder.appendBoolean(i % 3 == 0)); - assertSingleValueDenseBlock(blockBuilder.build()); + BooleanBlock block = blockBuilder.build(); + assertSingleValueDenseBlock(block); + block.close(); } } } @@ -191,9 +207,8 @@ public void testIntBlock() { } try ( - IntVector.Builder vectorBuilder = IntVector.newVectorBuilder( - randomBoolean() ? randomIntBetween(1, positionCount) : positionCount, - blockFactory + IntVector.Builder vectorBuilder = blockFactory.newIntVectorBuilder( + randomBoolean() ? randomIntBetween(1, positionCount) : positionCount ) ) { IntStream.range(0, positionCount).forEach(vectorBuilder::appendInt); @@ -318,7 +333,7 @@ public void testDoubleBlock() { assertThat((double) pos, is(block.getDouble(pos))); assertSingleValueDenseBlock(block); - try (DoubleBlock.Builder blockBuilder = DoubleBlock.newBlockBuilder(1)) { + try (DoubleBlock.Builder blockBuilder = blockFactory.newDoubleBlockBuilder(1)) { DoubleBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -403,7 +418,7 @@ public void testBytesRefBlock() { } assertSingleValueDenseBlock(block); - try (BytesRefBlock.Builder blockBuilder = BytesRefBlock.newBlockBuilder(1)) { + try (BytesRefBlock.Builder blockBuilder = blockFactory.newBytesRefBlockBuilder(1)) { BytesRefBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -524,7 +539,7 @@ public void testBooleanBlock() { assertThat(block.getBoolean(positionCount - 1), is((positionCount - 1) % 10 == 0)); assertSingleValueDenseBlock(block); - try (BooleanBlock.Builder blockBuilder = BooleanBlock.newBlockBuilder(1)) { + try (BooleanBlock.Builder blockBuilder = blockFactory.newBooleanBlockBuilder(1)) { BooleanBlock copy = blockBuilder.copyFrom(block, 0, block.getPositionCount()).build(); assertThat(copy, equalTo(block)); releaseAndAssertBreaker(block, copy); @@ -599,7 +614,7 @@ public void testConstantNullBlock() { public void testSingleValueSparseInt() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? 
randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = IntBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newIntBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; int[] values = new int[positionCount]; @@ -627,13 +642,14 @@ public void testSingleValueSparseInt() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseLong() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = LongBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newLongBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; long[] values = new long[positionCount]; @@ -660,13 +676,14 @@ public void testSingleValueSparseLong() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseDouble() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = DoubleBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newDoubleBlockBuilder(builderEstimateSize)) { int actualValueCount = 0; double[] values = new double[positionCount]; @@ -693,13 +710,14 @@ public void testSingleValueSparseDouble() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? is(nullValue()) : is(notNullValue())); + block.close(); } } public void testSingleValueSparseBoolean() { int positionCount = randomIntBetween(2, 16 * 1024); final int builderEstimateSize = randomBoolean() ? randomIntBetween(1, positionCount) : positionCount; - try (var blockBuilder = BooleanBlock.newBlockBuilder(builderEstimateSize)) { + try (var blockBuilder = blockFactory.newBooleanBlockBuilder(builderEstimateSize)) { boolean[] values = new boolean[positionCount]; int actualValueCount = 0; @@ -726,6 +744,7 @@ public void testSingleValueSparseBoolean() { } assertThat(block.nullValuesCount(), is(nullCount)); assertThat(block.asVector(), nullCount > 0 ? 
is(nullValue()) : is(notNullValue())); + block.close(); } } @@ -733,8 +752,8 @@ public void testToStringSmall() { final int estimatedSize = randomIntBetween(1024, 4096); try ( - var boolBlock = BooleanBlock.newBlockBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build(); - var boolVector = BooleanVector.newVectorBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build() + var boolBlock = blockFactory.newBooleanBlockBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build(); + var boolVector = blockFactory.newBooleanVectorBuilder(estimatedSize).appendBoolean(true).appendBoolean(false).build() ) { for (Object obj : List.of(boolVector, boolBlock, boolBlock.asVector())) { String s = obj.toString(); @@ -744,8 +763,8 @@ public void testToStringSmall() { } try ( - var intBlock = IntBlock.newBlockBuilder(estimatedSize).appendInt(1).appendInt(2).build(); - var intVector = IntVector.newVectorBuilder(estimatedSize).appendInt(1).appendInt(2).build() + var intBlock = blockFactory.newIntBlockBuilder(estimatedSize).appendInt(1).appendInt(2).build(); + var intVector = blockFactory.newIntVectorBuilder(estimatedSize).appendInt(1).appendInt(2).build() ) { for (Object obj : List.of(intVector, intBlock, intBlock.asVector())) { String s = obj.toString(); @@ -753,25 +772,38 @@ public void testToStringSmall() { assertThat(s, containsString("positions=2")); } for (IntBlock block : List.of(intBlock, intVector.asBlock())) { - assertThat(block.filter(0).toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]")); - assertThat(block.filter(1).toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=2]]")); - assertThat( - block.filter(0, 1).toString(), - containsString("IntVectorBlock[vector=IntArrayVector[positions=2, values=[1, 2]]]") - ); - assertThat(block.filter().toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=0, values=[]]]")); + try (var filter = block.filter(0)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=1]]")); + } + try (var filter = block.filter(1)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=ConstantIntVector[positions=1, value=2]]")); + } + try (var filter = block.filter(0, 1)) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=2, values=[1, 2]]]")); + } + try (var filter = block.filter()) { + assertThat(filter.toString(), containsString("IntVectorBlock[vector=IntArrayVector[positions=0, values=[]]]")); + } } for (IntVector vector : List.of(intVector, intBlock.asVector())) { - assertThat(vector.filter(0).toString(), containsString("ConstantIntVector[positions=1, value=1]")); - assertThat(vector.filter(1).toString(), containsString("ConstantIntVector[positions=1, value=2]")); - assertThat(vector.filter(0, 1).toString(), containsString("IntArrayVector[positions=2, values=[1, 2]]")); - assertThat(vector.filter().toString(), containsString("IntArrayVector[positions=0, values=[]]")); + try (var filter = vector.filter(0)) { + assertThat(filter.toString(), containsString("ConstantIntVector[positions=1, value=1]")); + } + try (IntVector filter = vector.filter(1)) { + assertThat(filter.toString(), containsString("ConstantIntVector[positions=1, value=2]")); + } + try (IntVector filter = vector.filter(0, 1)) { + assertThat(filter.toString(), containsString("IntArrayVector[positions=2, values=[1, 2]]")); + } + try (IntVector filter = 
vector.filter()) { + assertThat(filter.toString(), containsString("IntArrayVector[positions=0, values=[]]")); + } } } try ( - var longBlock = LongBlock.newBlockBuilder(estimatedSize).appendLong(10L).appendLong(20L).build(); - var longVector = LongVector.newVectorBuilder(estimatedSize).appendLong(10L).appendLong(20L).build() + var longBlock = blockFactory.newLongBlockBuilder(estimatedSize).appendLong(10L).appendLong(20L).build(); + var longVector = blockFactory.newLongVectorBuilder(estimatedSize).appendLong(10L).appendLong(20L).build() ) { for (Object obj : List.of(longVector, longBlock, longBlock.asVector())) { String s = obj.toString(); @@ -781,8 +813,8 @@ public void testToStringSmall() { } try ( - var doubleBlock = DoubleBlock.newBlockBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build(); - var doubleVector = DoubleVector.newVectorBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build() + var doubleBlock = blockFactory.newDoubleBlockBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build(); + var doubleVector = blockFactory.newDoubleVectorBuilder(estimatedSize).appendDouble(3.3).appendDouble(4.4).build() ) { for (Object obj : List.of(doubleVector, doubleBlock, doubleBlock.asVector())) { String s = obj.toString(); @@ -793,8 +825,8 @@ public void testToStringSmall() { assert new BytesRef("1a").toString().equals("[31 61]") && new BytesRef("2b").toString().equals("[32 62]"); try ( - var blockBuilder = BytesRefBlock.newBlockBuilder(estimatedSize); - var vectorBuilder = BytesRefVector.newVectorBuilder(estimatedSize) + var blockBuilder = blockFactory.newBytesRefBlockBuilder(estimatedSize); + var vectorBuilder = blockFactory.newBytesRefVectorBuilder(estimatedSize) ) { var bytesRefBlock = blockBuilder.appendBytesRef(new BytesRef("1a")).appendBytesRef(new BytesRef("2b")).build(); var bytesRefVector = vectorBuilder.appendBytesRef(new BytesRef("1a")).appendBytesRef(new BytesRef("2b")).build(); @@ -802,6 +834,7 @@ public void testToStringSmall() { String s = obj.toString(); assertThat(s, containsString("positions=2")); } + Releasables.close(bytesRefBlock, bytesRefVector); } } @@ -846,7 +879,7 @@ public static RandomBlock randomBlock( int maxDupsPerPosition ) { return randomBlock( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), elementType, positionCount, nullAllowed, @@ -1000,13 +1033,7 @@ void releaseAndAssertBreaker(Vector vector) { static void assertCannotDoubleRelease(Block block) { var ex = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(ex.getMessage(), containsString("can't release already released block")); - } - - static void assertCannotReleaseIfVectorAlreadyReleased(Block block) { - var ex = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(ex.getMessage(), containsString("can't release block")); - assertThat(ex.getMessage(), containsString("containing already released vector")); + assertThat(ex.getMessage(), containsString("can't release already released object")); } static void assertCannotReadFromPage(Page page) { @@ -1041,6 +1068,13 @@ public void testRefCountingArrayBlock() { assertThat(breaker.getUsed(), is(0L)); } + public void testRefCountingBigArrayBlock() { + Block block = randomBigArrayBlock(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(block); + assertThat(breaker.getUsed(), is(0L)); + } + public void testRefCountingConstantNullBlock() { Block block = blockFactory.newConstantNullBlock(10); assertThat(breaker.getUsed(), 
greaterThan(0L)); @@ -1057,83 +1091,165 @@ public void testRefCountingDocBlock() { } public void testRefCountingVectorBlock() { - Block block = randomNonDocVector().asBlock(); + Block block = randomConstantVector().asBlock(); assertThat(breaker.getUsed(), greaterThan(0L)); assertRefCountingBehavior(block); assertThat(breaker.getUsed(), is(0L)); } - // Take a block with exactly 1 reference and assert that ref counting works fine. - static void assertRefCountingBehavior(Block b) { - assertTrue(b.hasReferences()); + public void testRefCountingArrayVector() { + Vector vector = randomArrayVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingBigArrayVector() { + Vector vector = randomBigArrayVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingConstantVector() { + Vector vector = randomConstantVector(); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + public void testRefCountingDocVector() { + int positionCount = randomIntBetween(0, 100); + DocVector vector = new DocVector(intVector(positionCount), intVector(positionCount), intVector(positionCount), true); + assertThat(breaker.getUsed(), greaterThan(0L)); + assertRefCountingBehavior(vector); + assertThat(breaker.getUsed(), is(0L)); + } + + /** + * Take an object with exactly 1 reference and assert that ref counting works fine. + * Assumes that {@link Releasable#close()} and {@link RefCounted#decRef()} are equivalent. + */ + static void assertRefCountingBehavior(T object) { + assertTrue(object.hasReferences()); int numShallowCopies = randomIntBetween(0, 15); for (int i = 0; i < numShallowCopies; i++) { if (randomBoolean()) { - b.incRef(); + object.incRef(); } else { - assertTrue(b.tryIncRef()); + assertTrue(object.tryIncRef()); } } for (int i = 0; i < numShallowCopies; i++) { if (randomBoolean()) { - b.close(); + object.close(); } else { // closing and decRef'ing must be equivalent - assertFalse(b.decRef()); + assertFalse(object.decRef()); } - assertTrue(b.hasReferences()); + assertTrue(object.hasReferences()); } if (randomBoolean()) { - b.close(); + object.close(); } else { - assertTrue(b.decRef()); + assertTrue(object.decRef()); } - assertFalse(b.hasReferences()); - assertFalse(b.tryIncRef()); + assertFalse(object.hasReferences()); + assertFalse(object.tryIncRef()); - expectThrows(IllegalStateException.class, b::close); - expectThrows(IllegalStateException.class, b::incRef); + expectThrows(IllegalStateException.class, object::close); + expectThrows(IllegalStateException.class, object::incRef); } - public void testReleasedVectorInvalidatesBlockState() { - Vector vector = randomNonDocVector(); - Block block = vector.asBlock(); - - int numRefs = randomIntBetween(1, 10); - for (int i = 0; i < numRefs - 1; i++) { - block.incRef(); - } - - vector.close(); - assertEquals(false, block.tryIncRef()); - expectThrows(IllegalStateException.class, block::close); - expectThrows(IllegalStateException.class, block::incRef); + private IntVector intVector(int positionCount) { + return blockFactory.newIntArrayVector(IntStream.range(0, positionCount).toArray(), positionCount); } - public void testReleasedDocVectorInvalidatesBlockState() { + private Vector randomArrayVector() { int positionCount = randomIntBetween(0, 100); - DocVector vector = 
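
The generalized assertRefCountingBehavior above leans on the documented equivalence between Releasable#close() and RefCounted#decRef(): every incRef or tryIncRef must be balanced by a close or decRef, and only the final release frees the resource. A compact toy model of that contract (RefCountSketch is an invented class, not the Elasticsearch RefCounted implementation):

import java.util.concurrent.atomic.AtomicInteger;

public class RefCountSketch implements AutoCloseable {
    private final AtomicInteger refs = new AtomicInteger(1); // creation hands out one reference

    public void incRef() {
        if (tryIncRef() == false) throw new IllegalStateException("already released");
    }

    public boolean tryIncRef() {
        int current;
        do {
            current = refs.get();
            if (current <= 0) return false; // released objects can never come back
        } while (refs.compareAndSet(current, current + 1) == false);
        return true;
    }

    /** Returns true only on the call that released the final reference. */
    public boolean decRef() {
        int left = refs.decrementAndGet();
        if (left == 0) releaseResources();
        return left == 0;
    }

    public boolean hasReferences() { return refs.get() > 0; }

    @Override
    public void close() { decRef(); } // closing is just releasing one reference

    private void releaseResources() { /* refund the circuit breaker here */ }

    public static void main(String[] args) {
        RefCountSketch block = new RefCountSketch();
        block.incRef();                            // a second logical owner appears
        System.out.println(block.decRef());        // false: one owner still holds it
        try (block) { }                            // the last owner closes it
        System.out.println(block.hasReferences()); // false: fully released
    }
}
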
new DocVector(intVector(positionCount), intVector(positionCount), intVector(positionCount), true); - DocBlock block = vector.asBlock(); + int vectorType = randomIntBetween(0, 4); - int numRefs = randomIntBetween(1, 10); - for (int i = 0; i < numRefs - 1; i++) { - block.incRef(); - } + return switch (vectorType) { + case 0 -> { + boolean[] values = new boolean[positionCount]; + Arrays.fill(values, randomBoolean()); + yield blockFactory.newBooleanArrayVector(values, positionCount); + } + case 1 -> { + BytesRefArray values = new BytesRefArray(positionCount, BigArrays.NON_RECYCLING_INSTANCE); + for (int i = 0; i < positionCount; i++) { + values.append(new BytesRef(randomByteArrayOfLength(between(1, 20)))); + } + + yield blockFactory.newBytesRefArrayVector(values, positionCount); + } + case 2 -> { + double[] values = new double[positionCount]; + Arrays.fill(values, 1.0); + + yield blockFactory.newDoubleArrayVector(values, positionCount); + } + case 3 -> { + int[] values = new int[positionCount]; + Arrays.fill(values, 1); + + yield blockFactory.newIntArrayVector(values, positionCount); + } + default -> { + long[] values = new long[positionCount]; + Arrays.fill(values, 1L); - vector.close(); - assertEquals(false, block.tryIncRef()); - expectThrows(IllegalStateException.class, block::close); - expectThrows(IllegalStateException.class, block::incRef); + yield blockFactory.newLongArrayVector(values, positionCount); + } + }; } - private IntVector intVector(int positionCount) { - return blockFactory.newIntArrayVector(IntStream.range(0, positionCount).toArray(), positionCount); + private Vector randomBigArrayVector() { + int positionCount = randomIntBetween(0, 10000); + int arrayType = randomIntBetween(0, 3); + + return switch (arrayType) { + case 0 -> { + BitArray values = new BitArray(positionCount, blockFactory.bigArrays()); + for (int i = 0; i < positionCount; i++) { + if (randomBoolean()) { + values.set(positionCount); + } + } + + yield new BooleanBigArrayVector(values, positionCount, blockFactory); + } + case 1 -> { + DoubleArray values = blockFactory.bigArrays().newDoubleArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomDouble()); + } + + yield new DoubleBigArrayVector(values, positionCount, blockFactory); + } + case 2 -> { + IntArray values = blockFactory.bigArrays().newIntArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomInt()); + } + + yield new IntBigArrayVector(values, positionCount, blockFactory); + } + default -> { + LongArray values = blockFactory.bigArrays().newLongArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomLong()); + } + + yield new LongBigArrayVector(values, positionCount, blockFactory); + } + }; } - private Vector randomNonDocVector() { + private Vector randomConstantVector() { int positionCount = randomIntBetween(0, 100); int vectorType = randomIntBetween(0, 4); @@ -1153,7 +1269,7 @@ private Block randomArrayBlock() { return switch (arrayType) { case 0 -> { boolean[] values = new boolean[positionCount]; - Arrays.fill(values, true); + Arrays.fill(values, randomBoolean()); yield blockFactory.newBooleanArrayBlock(values, positionCount, new int[] {}, new BitSet(), randomOrdering()); } @@ -1185,4 +1301,46 @@ private Block randomArrayBlock() { } }; } + + private Block randomBigArrayBlock() { + int positionCount = randomIntBetween(0, 10000); + int arrayType = randomIntBetween(0, 3); + + return switch (arrayType) { + case 0 -> { + BitArray values = 
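
The randomBigArrayVector helper above wraps BitArray, DoubleArray, IntArray, and LongArray, which keep their values in fixed-size pages instead of one monolithic Java array, so very large vectors never need a single huge allocation. A stripped-down sketch of the paging idea (PagedLongArraySketch is invented; the real BigArrays additionally recycles pages and accounts them against the breaker):

public class PagedLongArraySketch {
    private static final int PAGE_SIZE = 1024; // elements per page
    private final long[][] pages;

    PagedLongArraySketch(long size) {
        int pageCount = (int) ((size + PAGE_SIZE - 1) / PAGE_SIZE);
        this.pages = new long[pageCount][];
        for (int i = 0; i < pageCount; i++) {
            pages[i] = new long[PAGE_SIZE]; // each page is a small, independent allocation
        }
    }

    void set(long index, long value) {
        pages[(int) (index / PAGE_SIZE)][(int) (index % PAGE_SIZE)] = value;
    }

    long get(long index) {
        return pages[(int) (index / PAGE_SIZE)][(int) (index % PAGE_SIZE)];
    }

    public static void main(String[] args) {
        PagedLongArraySketch array = new PagedLongArraySketch(10_000);
        array.set(9_999, 42L);
        System.out.println(array.get(9_999)); // 42
    }
}
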
new BitArray(positionCount, blockFactory.bigArrays()); + for (int i = 0; i < positionCount; i++) { + if (randomBoolean()) { + values.set(positionCount); + } + } + + yield new BooleanBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + case 1 -> { + DoubleArray values = blockFactory.bigArrays().newDoubleArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomDouble()); + } + + yield new DoubleBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + case 2 -> { + IntArray values = blockFactory.bigArrays().newIntArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomInt()); + } + + yield new IntBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + default -> { + LongArray values = blockFactory.bigArrays().newLongArray(positionCount, false); + for (int i = 0; i < positionCount; i++) { + values.set(i, randomLong()); + } + + yield new LongBigArrayBlock(values, positionCount, null, new BitSet(), Block.MvOrdering.UNORDERED, blockFactory); + } + }; + } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java index 25cd9ed5b9fe5..f76ff0708120b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BasicPageTests.java @@ -41,41 +41,72 @@ public void testExceptions() { } public void testEqualityAndHashCodeSmallInput() { + Page in = new Page(0); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(0, new Block[] {}), - page -> new Page(0, new Block[] {}), - page -> new Page(1, IntBlock.newConstantBlockWith(1, 1)) + in, + page -> new Page(0), + page -> new Page(1, blockFactory.newConstantIntBlockWith(1, 1)), + Page::releaseBlocks ); + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(new int[] {}, 0).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(new int[] {}, 0).asBlock()), - page -> new Page(new IntArrayVector(new int[] {}, 0).asBlock()), - page -> new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock()) + in, + page -> new Page(blockFactory.newIntArrayVector(new int[] {}, 0).asBlock()), + page -> new Page(blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock()), + Page::releaseBlocks ); + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(new int[] { 1 }, 0).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(new int[] { 1 }, 0).asBlock()), - page -> new Page(new IntArrayVector(new int[] { 1 }, 0).asBlock()), - page -> new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock()) + in, + page -> new Page(blockFactory.newIntArrayVector(new int[] { 1 }, 0).asBlock()), + page -> new Page(blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock()), + Page::releaseBlocks ); + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(new int[] { 1, 1, 1 }, 3).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(new int[] { 1, 1, 1 }, 3).asBlock()), - page -> new Page(IntBlock.newConstantBlockWith(1, 3)), - page -> new Page(IntBlock.newConstantBlockWith(1, 2)) + in, + page -> new Page(blockFactory.newConstantIntBlockWith(1, 3)), + page 
-> new Page(blockFactory.newConstantIntBlockWith(1, 2)), + Page::releaseBlocks ); + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()), - page -> new Page(new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()), - page -> new Page(new IntArrayVector(IntStream.range(0, 10).toArray(), 9).asBlock()) + in, + page -> new Page(blockFactory.newIntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()), + page -> new Page(blockFactory.newIntArrayVector(IntStream.range(0, 10).toArray(), 9).asBlock()), + Page::releaseBlocks ); + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(IntStream.range(0, 100).toArray(), 100).asBlock()); EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(IntStream.range(0, 100).toArray(), 100).asBlock()), - page -> new Page(new IntArrayVector(IntStream.range(0, 100).toArray(), 100).asBlock()), - page -> new Page(new LongArrayVector(LongStream.range(0, 100).toArray(), 100).asBlock()) + in, + page -> new Page(blockFactory.newIntArrayVector(IntStream.range(0, 100).toArray(), 100).asBlock()), + page -> new Page(blockFactory.newLongArrayVector(LongStream.range(0, 100).toArray(), 100).asBlock()), + Page::releaseBlocks ); - EqualsHashCodeTestUtils.checkEqualsAndHashCode( - new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock()), - page -> new Page(1, page.getBlock(0)), - page -> new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock(), new IntArrayVector(new int[] { 1 }, 1).asBlock()) + in.releaseBlocks(); + + in = new Page(blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock()); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(in, page -> { + page.getBlock(0).incRef(); + return new Page(1, page.getBlock(0)); + }, + page -> new Page( + blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock() + ), + Page::releaseBlocks ); + in.releaseBlocks(); } public void testEqualityAndHashCode() throws IOException { @@ -93,7 +124,10 @@ public void testEqualityAndHashCode() throws IOException { int positions = randomInt(page.getPositionCount() - 1); for (int blockIndex = 0; blockIndex < blocks.length; blockIndex++) { Block block = page.getBlock(blockIndex); - blocks[blockIndex] = block.elementType().newBlockBuilder(positions).copyFrom(block, 0, page.getPositionCount() - 1).build(); + blocks[blockIndex] = block.elementType() + .newBlockBuilder(positions, TestBlockFactory.getNonBreakingInstance()) + .copyFrom(block, 0, page.getPositionCount() - 1) + .build(); } return new Page(blocks); }; @@ -103,13 +137,13 @@ public void testEqualityAndHashCode() throws IOException { Block[] blocks = new Block[blockCount]; for (int blockIndex = 0; blockIndex < blockCount; blockIndex++) { blocks[blockIndex] = switch (randomInt(6)) { - case 0 -> new IntArrayVector(randomInts(positions).toArray(), positions).asBlock(); - case 1 -> new LongArrayVector(randomLongs(positions).toArray(), positions).asBlock(); - case 2 -> new DoubleArrayVector(randomDoubles(positions).toArray(), positions).asBlock(); - case 3 -> IntBlock.newConstantBlockWith(randomInt(), positions); - case 4 -> LongBlock.newConstantBlockWith(randomLong(), positions); - case 5 -> DoubleBlock.newConstantBlockWith(randomDouble(), positions); - case 6 -> BytesRefBlock.newConstantBlockWith(new 
BytesRef(Integer.toHexString(randomInt())), positions); + case 0 -> blockFactory.newIntArrayVector(randomInts(positions).toArray(), positions).asBlock(); + case 1 -> blockFactory.newLongArrayVector(randomLongs(positions).toArray(), positions).asBlock(); + case 2 -> blockFactory.newDoubleArrayVector(randomDoubles(positions).toArray(), positions).asBlock(); + case 3 -> blockFactory.newConstantIntBlockWith(randomInt(), positions); + case 4 -> blockFactory.newConstantLongBlockWith(randomLong(), positions); + case 5 -> blockFactory.newConstantDoubleBlockWith(randomDouble(), positions); + case 6 -> blockFactory.newConstantBytesRefBlockWith(new BytesRef(Integer.toHexString(randomInt())), positions); default -> throw new AssertionError(); }; } @@ -125,36 +159,40 @@ public void testEqualityAndHashCode() throws IOException { public void testBasic() { int positions = randomInt(1024); - Page page = new Page(new IntArrayVector(IntStream.range(0, positions).toArray(), positions).asBlock()); + Page page = new Page(blockFactory.newIntArrayVector(IntStream.range(0, positions).toArray(), positions).asBlock()); assertThat(1, is(page.getBlockCount())); assertThat(positions, is(page.getPositionCount())); IntBlock block = page.getBlock(0); IntStream.range(0, positions).forEach(i -> assertThat(i, is(block.getInt(i)))); + page.releaseBlocks(); } public void testAppend() { - Page page1 = new Page(new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()); - Page page2 = page1.appendBlock(new LongArrayVector(LongStream.range(0, 10).toArray(), 10).asBlock()); + Page page1 = new Page(blockFactory.newIntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock()); + Page page2 = page1.appendBlock(blockFactory.newLongArrayVector(LongStream.range(0, 10).toArray(), 10).asBlock()); assertThat(1, is(page1.getBlockCount())); assertThat(2, is(page2.getBlockCount())); IntBlock block1 = page2.getBlock(0); IntStream.range(0, 10).forEach(i -> assertThat(i, is(block1.getInt(i)))); LongBlock block2 = page2.getBlock(1); IntStream.range(0, 10).forEach(i -> assertThat((long) i, is(block2.getLong(i)))); + page2.releaseBlocks(); } public void testPageSerializationSimple() throws IOException { + IntVector toFilter = blockFactory.newIntArrayVector(IntStream.range(0, 20).toArray(), 20); Page origPage = new Page( - new IntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock(), - new LongArrayVector(LongStream.range(10, 20).toArray(), 10).asBlock(), - new DoubleArrayVector(LongStream.range(30, 40).mapToDouble(i -> i).toArray(), 10).asBlock(), - new BytesRefArrayVector(bytesRefArrayOf("0a", "1b", "2c", "3d", "4e", "5f", "6g", "7h", "8i", "9j"), 10).asBlock(), - IntBlock.newConstantBlockWith(randomInt(), 10), - LongBlock.newConstantBlockWith(randomInt(), 10), - DoubleBlock.newConstantBlockWith(randomInt(), 10), - BytesRefBlock.newConstantBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), - new IntArrayVector(IntStream.range(0, 20).toArray(), 20).filter(5, 6, 7, 8, 9, 10, 11, 12, 13, 14).asBlock() + blockFactory.newIntArrayVector(IntStream.range(0, 10).toArray(), 10).asBlock(), + blockFactory.newLongArrayVector(LongStream.range(10, 20).toArray(), 10).asBlock(), + blockFactory.newDoubleArrayVector(LongStream.range(30, 40).mapToDouble(i -> i).toArray(), 10).asBlock(), + blockFactory.newBytesRefArrayVector(bytesRefArrayOf("0a", "1b", "2c", "3d", "4e", "5f", "6g", "7h", "8i", "9j"), 10).asBlock(), + blockFactory.newConstantIntBlockWith(randomInt(), 10), + blockFactory.newConstantLongBlockWith(randomLong(), 10), + 
blockFactory.newConstantDoubleBlockWith(randomDouble(), 10), + blockFactory.newConstantBytesRefBlockWith(new BytesRef(Integer.toHexString(randomInt())), 10), + toFilter.filter(5, 6, 7, 8, 9, 10, 11, 12, 13, 14).asBlock() ); + toFilter.close(); try { Page deserPage = serializeDeserializePage(origPage); try { @@ -177,12 +215,12 @@ public void testPageSerializationSimple() throws IOException { public void testSerializationListPages() throws IOException { final int positions = randomIntBetween(1, 64); List origPages = List.of( - new Page(new IntArrayVector(randomInts(positions).toArray(), positions).asBlock()), + new Page(blockFactory.newIntArrayVector(randomInts(positions).toArray(), positions).asBlock()), new Page( - new LongArrayVector(randomLongs(positions).toArray(), positions).asBlock(), - DoubleBlock.newConstantBlockWith(randomInt(), positions) + blockFactory.newLongArrayVector(randomLongs(positions).toArray(), positions).asBlock(), + blockFactory.newConstantDoubleBlockWith(randomInt(), positions) ), - new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("Hello World"), positions)) + new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("Hello World"), positions)) ); try { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origPages, page -> { @@ -198,7 +236,7 @@ public void testSerializationListPages() throws IOException { public void testPageMultiRelease() { int positions = randomInt(1024); - var block = new IntArrayVector(IntStream.range(0, positions).toArray(), positions).asBlock(); + var block = blockFactory.newIntArrayVector(IntStream.range(0, positions).toArray(), positions).asBlock(); Page page = new Page(block); page.releaseBlocks(); assertThat(block.isReleased(), is(true)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayBlockBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayBlockBuilderTests.java new file mode 100644 index 0000000000000..0fd78fd3cb9bf --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayBlockBuilderTests.java @@ -0,0 +1,260 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +import org.apache.lucene.util.RamUsageEstimator; +import org.elasticsearch.common.unit.ByteSizeValue; + +import java.io.IOException; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + +public class BigArrayBlockBuilderTests extends SerializationTestCase { + + static ByteSizeValue estimateArraySize(long elementSize, long numElements) { + long bytes = RamUsageEstimator.NUM_BYTES_ARRAY_HEADER + RamUsageEstimator.alignObjectSize(elementSize * numElements); + return ByteSizeValue.ofBytes(bytes); + } + + public void testLongVector() throws IOException { + int maxPrimitiveElements = randomIntBetween(100, 1000); + var maxPrimitiveSize = estimateArraySize(Long.BYTES, maxPrimitiveElements); + blockFactory = new BlockFactory(blockFactory.breaker(), blockFactory.bigArrays(), maxPrimitiveSize); + int numElements = between(2, maxPrimitiveElements / 2); + try (var builder = blockFactory.newLongBlockBuilder(between(1, maxPrimitiveElements / 2))) { + long[] elements = new long[numElements]; + for (int i = 0; i < numElements; i++) { + elements[i] = randomLong(); + builder.appendLong(elements[i]); + } + try (LongBlock block = builder.build()) { + assertThat(block, instanceOf(LongVectorBlock.class)); + assertThat(block.asVector(), instanceOf(LongArrayVector.class)); + assertThat(block.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getLong(i), equalTo(elements[i])); + } + try (LongBlock copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(LongVectorBlock.class)); + assertThat(block.asVector(), instanceOf(LongArrayVector.class)); + assertThat(copy.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getLong(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + numElements = between(maxPrimitiveElements + 10, maxPrimitiveElements * 2); + try (var builder = blockFactory.newLongBlockBuilder(between(1, maxPrimitiveElements * 2))) { + long[] elements = new long[numElements]; + for (int i = 0; i < numElements; i++) { + elements[i] = randomLong(); + builder.appendLong(elements[i]); + } + try (LongBlock block = builder.build()) { + assertThat(block, instanceOf(LongVectorBlock.class)); + assertThat(block.asVector(), instanceOf(LongBigArrayVector.class)); + assertThat(block.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getLong(i), equalTo(elements[i])); + } + try (LongBlock copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(LongVectorBlock.class)); + assertThat(block.asVector(), instanceOf(LongBigArrayVector.class)); + assertThat(copy.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getLong(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testLongBlock() throws IOException { + int maxPrimitiveElements = randomIntBetween(1000, 5000); + var maxPrimitiveSize = estimateArraySize(Long.BYTES, maxPrimitiveElements); + blockFactory = new BlockFactory(blockFactory.breaker(), blockFactory.bigArrays(), maxPrimitiveSize); + int numElements = between(2, maxPrimitiveElements / 2); + try (var builder = blockFactory.newLongBlockBuilder(between(1, maxPrimitiveElements / 2))) { + long[] elements = new long[numElements]; + builder.beginPositionEntry(); + for 
(int i = 0; i < numElements; i++) { + elements[i] = randomLong(); + builder.appendLong(elements[i]); + } + builder.endPositionEntry(); + try (LongBlock block = builder.build()) { + assertThat(block, instanceOf(LongArrayBlock.class)); + assertNull(block.asVector()); + assertThat(block.getPositionCount(), equalTo(1)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getLong(i), equalTo(elements[i])); + } + try (LongBlock copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(LongArrayBlock.class)); + assertNull(copy.asVector()); + assertThat(copy.getPositionCount(), equalTo(1)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getLong(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + numElements = between(maxPrimitiveElements + 10, maxPrimitiveElements * 2); + try (var builder = blockFactory.newLongBlockBuilder(between(1, maxPrimitiveElements * 2))) { + long[] elements = new long[numElements]; + builder.beginPositionEntry(); + for (int i = 0; i < numElements; i++) { + elements[i] = randomLong(); + builder.appendLong(elements[i]); + } + builder.endPositionEntry(); + try (LongBlock block = builder.build()) { + assertThat(block, instanceOf(LongBigArrayBlock.class)); + assertNull(block.asVector()); + assertThat(block.getPositionCount(), equalTo(1)); + assertThat(block.getTotalValueCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getLong(i), equalTo(elements[i])); + } + try (LongBlock copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(LongBigArrayBlock.class)); + assertNull(block.asVector()); + assertThat(copy.getPositionCount(), equalTo(1)); + assertThat(copy.getTotalValueCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getLong(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBooleanVector() throws IOException { + int maxPrimitiveElements = randomIntBetween(100, 1000); + var maxPrimitiveSize = estimateArraySize(Byte.BYTES, maxPrimitiveElements); + blockFactory = new BlockFactory(blockFactory.breaker(), blockFactory.bigArrays(), maxPrimitiveSize); + int numElements = between(2, maxPrimitiveElements / 2); + try (var builder = blockFactory.newBooleanBlockBuilder(between(1, maxPrimitiveElements / 2))) { + boolean[] elements = new boolean[numElements]; + for (int i = 0; i < numElements; i++) { + elements[i] = randomBoolean(); + builder.appendBoolean(elements[i]); + } + try (var block = builder.build()) { + assertThat(block, instanceOf(BooleanVectorBlock.class)); + assertThat(block.asVector(), instanceOf(BooleanArrayVector.class)); + assertThat(block.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getBoolean(i), equalTo(elements[i])); + } + try (var copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(BooleanVectorBlock.class)); + assertThat(block.asVector(), instanceOf(BooleanArrayVector.class)); + assertThat(copy.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getBoolean(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + numElements = between(maxPrimitiveElements + 10, maxPrimitiveElements * 2); + try (var builder = blockFactory.newBooleanBlockBuilder(between(1, maxPrimitiveElements * 2))) { + boolean[] elements = new 
boolean[numElements]; + for (int i = 0; i < numElements; i++) { + elements[i] = randomBoolean(); + builder.appendBoolean(elements[i]); + } + try (var block = builder.build()) { + assertThat(block, instanceOf(BooleanVectorBlock.class)); + assertThat(block.asVector(), instanceOf(BooleanBigArrayVector.class)); + assertThat(block.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getBoolean(i), equalTo(elements[i])); + } + try (var copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(BooleanVectorBlock.class)); + assertThat(block.asVector(), instanceOf(BooleanBigArrayVector.class)); + assertThat(copy.getPositionCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getBoolean(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } + + public void testBooleanBlock() throws IOException { + int maxPrimitiveElements = randomIntBetween(1000, 5000); + var maxPrimitiveSize = estimateArraySize(Byte.BYTES, maxPrimitiveElements); + blockFactory = new BlockFactory(blockFactory.breaker(), blockFactory.bigArrays(), maxPrimitiveSize); + int numElements = between(2, maxPrimitiveElements / 2); + try (var builder = blockFactory.newBooleanBlockBuilder(between(1, maxPrimitiveElements / 2))) { + boolean[] elements = new boolean[numElements]; + builder.beginPositionEntry(); + for (int i = 0; i < numElements; i++) { + elements[i] = randomBoolean(); + builder.appendBoolean(elements[i]); + } + builder.endPositionEntry(); + try (var block = builder.build()) { + assertThat(block, instanceOf(BooleanArrayBlock.class)); + assertNull(block.asVector()); + assertThat(block.getPositionCount(), equalTo(1)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getBoolean(i), equalTo(elements[i])); + } + try (var copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(BooleanArrayBlock.class)); + assertNull(copy.asVector()); + assertThat(copy.getPositionCount(), equalTo(1)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getBoolean(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + numElements = between(maxPrimitiveElements + 10, maxPrimitiveElements * 2); + try (var builder = blockFactory.newBooleanBlockBuilder(between(1, maxPrimitiveElements * 2))) { + boolean[] elements = new boolean[numElements]; + builder.beginPositionEntry(); + for (int i = 0; i < numElements; i++) { + elements[i] = randomBoolean(); + builder.appendBoolean(elements[i]); + } + builder.endPositionEntry(); + try (var block = builder.build()) { + assertThat(block, instanceOf(BooleanBigArrayBlock.class)); + assertNull(block.asVector()); + assertThat(block.getPositionCount(), equalTo(1)); + assertThat(block.getTotalValueCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(block.getBoolean(i), equalTo(elements[i])); + } + try (var copy = serializeDeserializeBlock(block)) { + assertThat(copy, instanceOf(BooleanBigArrayBlock.class)); + assertNull(block.asVector()); + assertThat(copy.getPositionCount(), equalTo(1)); + assertThat(copy.getTotalValueCount(), equalTo(numElements)); + for (int i = 0; i < numElements; i++) { + assertThat(copy.getBoolean(i), equalTo(elements[i])); + } + } + } + } + assertThat(blockFactory.breaker().getUsed(), equalTo(0L)); + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java index 3033f672f897f..74d7e3e142d04 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BigArrayVectorTests.java @@ -32,14 +32,15 @@ public void testBoolean() throws IOException { Boolean[] values = IntStream.range(0, positionCount).mapToObj(i -> randomBoolean()).toArray(Boolean[]::new); BitArray array = new BitArray(positionCount, bigArrays); IntStream.range(0, positionCount).filter(i -> values[i]).forEach(array::set); - try (var vector = new BooleanBigArrayVector(array, positionCount)) { + try (var vector = new BooleanBigArrayVector(array, positionCount, blockFactory)) { assertThat(vector.elementType(), is(ElementType.BOOLEAN)); assertThat(positionCount, is(vector.getPositionCount())); IntStream.range(0, positionCount).forEach(i -> assertThat(vector.getBoolean(i), is(values[i]))); assertThat(vector.isConstant(), is(false)); - BooleanVector filtered = vector.filter(IntStream.range(0, positionCount).toArray()); - IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getBoolean(i), is(values[i]))); - assertThat(filtered.isConstant(), is(false)); + try (BooleanVector filtered = vector.filter(IntStream.range(0, positionCount).toArray())) { + IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getBoolean(i), is(values[i]))); + assertThat(filtered.isConstant(), is(false)); + } BooleanBlock block = vector.asBlock(); assertThat(block, is(notNullValue())); IntStream.range(0, positionCount).forEach(i -> { @@ -47,7 +48,9 @@ public void testBoolean() throws IOException { assertThat(block.isNull(i), is(false)); assertThat(block.getValueCount(i), is(1)); assertThat(block.getFirstValueIndex(i), is(i)); - assertThat(block.filter(i).getBoolean(0), is(values[i])); + try (BooleanBlock filter = block.filter(i)) { + assertThat(filter.getBoolean(0), is(values[i])); + } }); BasicBlockTests.assertSingleValueDenseBlock(vector.asBlock()); assertSerialization(block); @@ -60,14 +63,15 @@ public void testInt() throws IOException { int[] values = IntStream.range(0, positionCount).map(i -> randomInt()).toArray(); IntArray array = bigArrays.newIntArray(positionCount); IntStream.range(0, positionCount).forEach(i -> array.set(i, values[i])); - try (var vector = new IntBigArrayVector(array, positionCount)) { + try (var vector = new IntBigArrayVector(array, positionCount, blockFactory)) { assertThat(vector.elementType(), is(ElementType.INT)); assertThat(positionCount, is(vector.getPositionCount())); IntStream.range(0, positionCount).forEach(i -> assertThat(vector.getInt(i), is(values[i]))); assertThat(vector.isConstant(), is(false)); - IntVector filtered = vector.filter(IntStream.range(0, positionCount).toArray()); - IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getInt(i), is(values[i]))); - assertThat(filtered.isConstant(), is(false)); + try (IntVector filtered = vector.filter(IntStream.range(0, positionCount).toArray())) { + IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getInt(i), is(values[i]))); + assertThat(filtered.isConstant(), is(false)); + } IntBlock block = vector.asBlock(); assertThat(block, is(notNullValue())); IntStream.range(0, positionCount).forEach(i -> { @@ -75,7 +79,9 @@ public void testInt() throws IOException { assertThat(block.isNull(i), is(false)); assertThat(block.getValueCount(i), is(1)); 
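// The pattern these hunks introduce, in miniature: filter() now returns a view that the caller must
// close before the breaker can return to zero. A sketch, assuming only names already in scope in this test:
//   try (IntVector filtered = vector.filter(0)) {
//       assertThat(filtered.getInt(0), is(values[0]));
//   }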
assertThat(block.getFirstValueIndex(i), is(i)); - assertThat(block.filter(i).getInt(0), is(values[i])); + try (IntBlock filter = block.filter(i)) { + assertThat(filter.getInt(0), is(values[i])); + } }); BasicBlockTests.assertSingleValueDenseBlock(vector.asBlock()); assertSerialization(block); @@ -88,14 +94,15 @@ public void testLong() throws IOException { long[] values = IntStream.range(0, positionCount).mapToLong(i -> randomLong()).toArray(); LongArray array = bigArrays.newLongArray(positionCount); IntStream.range(0, positionCount).forEach(i -> array.set(i, values[i])); - try (var vector = new LongBigArrayVector(array, positionCount)) { + try (var vector = new LongBigArrayVector(array, positionCount, blockFactory)) { assertThat(vector.elementType(), is(ElementType.LONG)); assertThat(positionCount, is(vector.getPositionCount())); IntStream.range(0, positionCount).forEach(i -> assertThat(vector.getLong(i), is(values[i]))); assertThat(vector.isConstant(), is(false)); - LongVector filtered = vector.filter(IntStream.range(0, positionCount).toArray()); - IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getLong(i), is(values[i]))); - assertThat(filtered.isConstant(), is(false)); + try (LongVector filtered = vector.filter(IntStream.range(0, positionCount).toArray())) { + IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getLong(i), is(values[i]))); + assertThat(filtered.isConstant(), is(false)); + } LongBlock block = vector.asBlock(); assertThat(block, is(notNullValue())); IntStream.range(0, positionCount).forEach(i -> { @@ -103,7 +110,9 @@ public void testLong() throws IOException { assertThat(block.isNull(i), is(false)); assertThat(block.getValueCount(i), is(1)); assertThat(block.getFirstValueIndex(i), is(i)); - assertThat(block.filter(i).getLong(0), is(values[i])); + try (LongBlock filter = block.filter(i)) { + assertThat(filter.getLong(0), is(values[i])); + } }); BasicBlockTests.assertSingleValueDenseBlock(vector.asBlock()); assertSerialization(block); @@ -116,14 +125,15 @@ public void testDouble() throws IOException { double[] values = IntStream.range(0, positionCount).mapToDouble(i -> randomDouble()).toArray(); DoubleArray array = bigArrays.newDoubleArray(positionCount); IntStream.range(0, positionCount).forEach(i -> array.set(i, values[i])); - try (var vector = new DoubleBigArrayVector(array, positionCount)) { + try (var vector = new DoubleBigArrayVector(array, positionCount, blockFactory)) { assertThat(vector.elementType(), is(ElementType.DOUBLE)); assertThat(positionCount, is(vector.getPositionCount())); IntStream.range(0, positionCount).forEach(i -> assertThat(vector.getDouble(i), is(values[i]))); assertThat(vector.isConstant(), is(false)); - DoubleVector filtered = vector.filter(IntStream.range(0, positionCount).toArray()); - IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getDouble(i), is(values[i]))); - assertThat(filtered.isConstant(), is(false)); + try (DoubleVector filtered = vector.filter(IntStream.range(0, positionCount).toArray())) { + IntStream.range(0, positionCount).forEach(i -> assertThat(filtered.getDouble(i), is(values[i]))); + assertThat(filtered.isConstant(), is(false)); + } DoubleBlock block = vector.asBlock(); assertThat(block, is(notNullValue())); IntStream.range(0, positionCount).forEach(i -> { @@ -131,7 +141,9 @@ public void testDouble() throws IOException { assertThat(block.isNull(i), is(false)); assertThat(block.getValueCount(i), is(1)); assertThat(block.getFirstValueIndex(i), is(i)); - 
assertThat(block.filter(i).getDouble(0), is(values[i])); + try (DoubleBlock filter = block.filter(i)) { + assertThat(filter.getDouble(0), is(values[i])); + } }); BasicBlockTests.assertSingleValueDenseBlock(vector.asBlock()); assertSerialization(block); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java index d62fd75abbcdd..b5155f3199c1c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockAccountingTests.java @@ -13,7 +13,8 @@ import org.elasticsearch.common.util.BigArray; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.core.Releasables; import org.hamcrest.Matcher; import java.lang.reflect.Field; @@ -29,7 +30,7 @@ import static org.hamcrest.Matchers.lessThan; import static org.hamcrest.Matchers.lessThanOrEqualTo; -public class BlockAccountingTests extends ESTestCase { +public class BlockAccountingTests extends ComputeTestCase { static final Accumulator RAM_USAGE_ACCUMULATOR = new TestRamUsageAccumulator(); @@ -38,182 +39,277 @@ public class BlockAccountingTests extends ESTestCase { // Array Vectors public void testBooleanVector() { - Vector empty = new BooleanArrayVector(new boolean[] {}, 0); + BlockFactory blockFactory = blockFactory(); + Vector empty = blockFactory.newBooleanArrayVector(new boolean[] {}, 0); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Vector emptyPlusOne = new BooleanArrayVector(new boolean[] { randomBoolean() }, 1); + Vector emptyPlusOne = blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 1); assertThat(emptyPlusOne.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + 1))); boolean[] randomData = new boolean[randomIntBetween(2, 1024)]; - Vector emptyPlusSome = new BooleanArrayVector(randomData, randomData.length); + Vector emptyPlusSome = blockFactory.newBooleanArrayVector(randomData, randomData.length); assertThat(emptyPlusSome.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + randomData.length))); Vector filterVector = emptyPlusSome.filter(1); assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusSome.ramBytesUsed())); + Releasables.close(empty, emptyPlusOne, emptyPlusSome, filterVector); } public void testIntVector() { - Vector empty = new IntArrayVector(new int[] {}, 0); + BlockFactory blockFactory = blockFactory(); + Vector empty = blockFactory.newIntArrayVector(new int[] {}, 0); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Vector emptyPlusOne = new IntArrayVector(new int[] { randomInt() }, 1); + Vector emptyPlusOne = blockFactory.newIntArrayVector(new int[] { randomInt() }, 1); assertThat(emptyPlusOne.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + Integer.BYTES))); int[] randomData = new int[randomIntBetween(2, 1024)]; - Vector emptyPlusSome = new IntArrayVector(randomData, randomData.length); + Vector emptyPlusSome = blockFactory.newIntArrayVector(randomData, randomData.length); assertThat(emptyPlusSome.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() 
+ (long) Integer.BYTES * randomData.length))); Vector filterVector = emptyPlusSome.filter(1); assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusSome.ramBytesUsed())); + Releasables.close(empty, emptyPlusOne, emptyPlusSome, filterVector); } public void testLongVector() { - Vector empty = new LongArrayVector(new long[] {}, 0); + BlockFactory blockFactory = blockFactory(); + Vector empty = blockFactory.newLongArrayVector(new long[] {}, 0); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Vector emptyPlusOne = new LongArrayVector(new long[] { randomLong() }, 1); + Vector emptyPlusOne = blockFactory.newLongArrayVector(new long[] { randomLong() }, 1); assertThat(emptyPlusOne.ramBytesUsed(), is(empty.ramBytesUsed() + Long.BYTES)); long[] randomData = new long[randomIntBetween(2, 1024)]; - Vector emptyPlusSome = new LongArrayVector(randomData, randomData.length); + Vector emptyPlusSome = blockFactory.newLongArrayVector(randomData, randomData.length); assertThat(emptyPlusSome.ramBytesUsed(), is(empty.ramBytesUsed() + (long) Long.BYTES * randomData.length)); Vector filterVector = emptyPlusSome.filter(1); assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusSome.ramBytesUsed())); + + Releasables.close(empty, emptyPlusOne, emptyPlusSome, filterVector); } public void testDoubleVector() { - Vector empty = new DoubleArrayVector(new double[] {}, 0); + BlockFactory blockFactory = blockFactory(); + Vector empty = blockFactory.newDoubleArrayVector(new double[] {}, 0); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Vector emptyPlusOne = new DoubleArrayVector(new double[] { randomDouble() }, 1); + Vector emptyPlusOne = blockFactory.newDoubleArrayVector(new double[] { randomDouble() }, 1); assertThat(emptyPlusOne.ramBytesUsed(), is(empty.ramBytesUsed() + Double.BYTES)); double[] randomData = new double[randomIntBetween(2, 1024)]; - Vector emptyPlusSome = new DoubleArrayVector(randomData, randomData.length); + Vector emptyPlusSome = blockFactory.newDoubleArrayVector(randomData, randomData.length); assertThat(emptyPlusSome.ramBytesUsed(), is(empty.ramBytesUsed() + (long) Double.BYTES * randomData.length)); // a filter becomes responsible for its enclosing data, both in terms of accounting and releasability Vector filterVector = emptyPlusSome.filter(1); assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusSome.ramBytesUsed())); + + Releasables.close(empty, emptyPlusOne, emptyPlusSome, filterVector); } public void testBytesRefVector() { - try ( - var emptyArray = new BytesRefArray(0, BigArrays.NON_RECYCLING_INSTANCE); - var arrayWithOne = new BytesRefArray(0, BigArrays.NON_RECYCLING_INSTANCE) - ) { - Vector emptyVector = new BytesRefArrayVector(emptyArray, 0); - long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR); - assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); - - var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); - arrayWithOne.append(bytesRef); - Vector emptyPlusOne = new BytesRefArrayVector(arrayWithOne, 1); - assertThat(emptyPlusOne.ramBytesUsed(), between(emptyVector.ramBytesUsed() + bytesRef.length, UPPER_BOUND)); - - Vector filterVector = emptyPlusOne.filter(0); - assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); - } + BlockFactory blockFactory = blockFactory(); + var emptyArray = new BytesRefArray(0,
blockFactory.bigArrays()); + var arrayWithOne = new BytesRefArray(0, blockFactory.bigArrays()); + Vector emptyVector = blockFactory.newBytesRefArrayVector(emptyArray, 0); + long expectedEmptyVectorUsed = RamUsageTester.ramUsed(emptyVector, RAM_USAGE_ACCUMULATOR); + assertThat(emptyVector.ramBytesUsed(), is(expectedEmptyVectorUsed)); + + var bytesRef = new BytesRef(randomAlphaOfLengthBetween(1, 16)); + arrayWithOne.append(bytesRef); + Vector emptyPlusOne = blockFactory.newBytesRefArrayVector(arrayWithOne, 1); + assertThat(emptyPlusOne.ramBytesUsed(), between(emptyVector.ramBytesUsed() + bytesRef.length, UPPER_BOUND)); + + Vector filterVector = emptyPlusOne.filter(0); + assertThat(filterVector.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); + Releasables.close(emptyVector, emptyPlusOne, filterVector); } // Array Blocks public void testBooleanBlock() { - Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[0], null, Block.MvOrdering.UNORDERED); + BlockFactory blockFactory = blockFactory(); + Block empty = new BooleanArrayBlock(new boolean[] {}, 0, new int[0], null, Block.MvOrdering.UNORDERED, blockFactory); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Block emptyPlusOne = new BooleanArrayBlock(new boolean[] { randomBoolean() }, 1, new int[] { 0 }, null, Block.MvOrdering.UNORDERED); + Block emptyPlusOne = new BooleanArrayBlock( + new boolean[] { randomBoolean() }, + 1, + new int[] { 0 }, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); assertThat(emptyPlusOne.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + 1) + alignObjectSize(Integer.BYTES))); boolean[] randomData = new boolean[randomIntBetween(2, 1024)]; int[] valueIndices = IntStream.range(0, randomData.length + 1).toArray(); - Block emptyPlusSome = new BooleanArrayBlock(randomData, randomData.length, valueIndices, null, Block.MvOrdering.UNORDERED); + Block emptyPlusSome = new BooleanArrayBlock( + randomData, + randomData.length, + valueIndices, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); long expected = empty.ramBytesUsed() + ramBytesForBooleanArray(randomData) + ramBytesForIntArray(valueIndices); assertThat(emptyPlusSome.ramBytesUsed(), is(expected)); Block filterBlock = emptyPlusSome.filter(1); assertThat(filterBlock.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); + Releasables.close(filterBlock); } public void testBooleanBlockWithNullFirstValues() { - Block empty = new BooleanArrayBlock(new boolean[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); + Block empty = new BooleanArrayBlock( + new boolean[] {}, + 0, + null, + BitSet.valueOf(new byte[] { 1 }), + Block.MvOrdering.UNORDERED, + blockFactory() + ); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), lessThanOrEqualTo(expectedEmptyUsed)); } public void testIntBlock() { - Block empty = new IntArrayBlock(new int[] {}, 0, new int[] {}, null, Block.MvOrdering.UNORDERED); + BlockFactory blockFactory = blockFactory(); + Block empty = new IntArrayBlock(new int[] {}, 0, new int[] {}, null, Block.MvOrdering.UNORDERED, blockFactory); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Block emptyPlusOne = new IntArrayBlock(new int[] { randomInt() }, 1, new int[] { 0 }, null, Block.MvOrdering.UNORDERED); + Block emptyPlusOne = new IntArrayBlock( + new int[] { randomInt() 
}, + 1, + new int[] { 0 }, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); assertThat(emptyPlusOne.ramBytesUsed(), is(empty.ramBytesUsed() + alignObjectSize(Integer.BYTES) + alignObjectSize(Integer.BYTES))); int[] randomData = new int[randomIntBetween(2, 1024)]; int[] valueIndices = IntStream.range(0, randomData.length + 1).toArray(); - Block emptyPlusSome = new IntArrayBlock(randomData, randomData.length, valueIndices, null, Block.MvOrdering.UNORDERED); + Block emptyPlusSome = new IntArrayBlock( + randomData, + randomData.length, + valueIndices, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); long expected = empty.ramBytesUsed() + ramBytesForIntArray(randomData) + ramBytesForIntArray(valueIndices); assertThat(emptyPlusSome.ramBytesUsed(), is(expected)); Block filterBlock = emptyPlusSome.filter(1); assertThat(filterBlock.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); + Releasables.close(filterBlock); } public void testIntBlockWithNullFirstValues() { - Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); + BlockFactory blockFactory = blockFactory(); + Block empty = new IntArrayBlock(new int[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED, blockFactory); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testLongBlock() { - Block empty = new LongArrayBlock(new long[] {}, 0, new int[0], null, Block.MvOrdering.UNORDERED); + BlockFactory blockFactory = blockFactory(); + Block empty = new LongArrayBlock(new long[] {}, 0, new int[0], null, Block.MvOrdering.UNORDERED, blockFactory); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Block emptyPlusOne = new LongArrayBlock(new long[] { randomInt() }, 1, new int[] { 0 }, null, Block.MvOrdering.UNORDERED); + Block emptyPlusOne = new LongArrayBlock( + new long[] { randomInt() }, + 1, + new int[] { 0 }, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); assertThat(emptyPlusOne.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + Long.BYTES) + alignObjectSize(Integer.BYTES))); long[] randomData = new long[randomIntBetween(2, 1024)]; int[] valueIndices = IntStream.range(0, randomData.length + 1).toArray(); - Block emptyPlusSome = new LongArrayBlock(randomData, randomData.length, valueIndices, null, Block.MvOrdering.UNORDERED); + Block emptyPlusSome = new LongArrayBlock( + randomData, + randomData.length, + valueIndices, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); long expected = empty.ramBytesUsed() + ramBytesForLongArray(randomData) + ramBytesForIntArray(valueIndices); assertThat(emptyPlusSome.ramBytesUsed(), is(expected)); Block filterBlock = emptyPlusSome.filter(1); assertThat(filterBlock.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); + Releasables.close(filterBlock); } public void testLongBlockWithNullFirstValues() { - Block empty = new LongArrayBlock(new long[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); + Block empty = new LongArrayBlock( + new long[] {}, + 0, + null, + BitSet.valueOf(new byte[] { 1 }), + Block.MvOrdering.UNORDERED, + blockFactory() + ); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } public void testDoubleBlock() { - Block empty = new DoubleArrayBlock(new double[] {}, 0, new 
int[0], null, Block.MvOrdering.UNORDERED); + BlockFactory blockFactory = blockFactory(); + Block empty = new DoubleArrayBlock(new double[] {}, 0, new int[0], null, Block.MvOrdering.UNORDERED, blockFactory); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); - Block emptyPlusOne = new DoubleArrayBlock(new double[] { randomInt() }, 1, new int[] { 0 }, null, Block.MvOrdering.UNORDERED); + Block emptyPlusOne = new DoubleArrayBlock( + new double[] { randomInt() }, + 1, + new int[] { 0 }, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); assertThat(emptyPlusOne.ramBytesUsed(), is(alignObjectSize(empty.ramBytesUsed() + Double.BYTES) + alignObjectSize(Integer.BYTES))); double[] randomData = new double[randomIntBetween(2, 1024)]; int[] valueIndices = IntStream.range(0, randomData.length + 1).toArray(); - Block emptyPlusSome = new DoubleArrayBlock(randomData, randomData.length, valueIndices, null, Block.MvOrdering.UNORDERED); + Block emptyPlusSome = new DoubleArrayBlock( + randomData, + randomData.length, + valueIndices, + null, + Block.MvOrdering.UNORDERED, + blockFactory + ); long expected = empty.ramBytesUsed() + ramBytesForDoubleArray(randomData) + ramBytesForIntArray(valueIndices); assertThat(emptyPlusSome.ramBytesUsed(), is(expected)); Block filterBlock = emptyPlusSome.filter(1); assertThat(filterBlock.ramBytesUsed(), lessThan(emptyPlusOne.ramBytesUsed())); + Releasables.close(filterBlock); } public void testDoubleBlockWithNullFirstValues() { - Block empty = new DoubleArrayBlock(new double[] {}, 0, null, BitSet.valueOf(new byte[] { 1 }), Block.MvOrdering.UNORDERED); + Block empty = new DoubleArrayBlock( + new double[] {}, + 0, + null, + BitSet.valueOf(new byte[] { 1 }), + Block.MvOrdering.UNORDERED, + blockFactory() + ); long expectedEmptyUsed = RamUsageTester.ramUsed(empty, RAM_USAGE_ACCUMULATOR); assertThat(empty.ramBytesUsed(), is(expectedEmptyUsed)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java index a9f08eee02d70..9c1b02aa74107 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderAppendBlockTests.java @@ -7,17 +7,19 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; import java.util.ArrayList; import java.util.List; import static org.hamcrest.Matchers.equalTo; -public class BlockBuilderAppendBlockTests extends ESTestCase { +public class BlockBuilderAppendBlockTests extends ComputeTestCase { public void testBasic() { - IntBlock src = new IntBlockBuilder(10, BlockFactory.getNonBreakingInstance()).appendInt(1) + BlockFactory blockFactory = blockFactory(); + IntBlock src = blockFactory.newIntBlockBuilder(10) + .appendInt(1) .appendNull() .beginPositionEntry() .appendInt(4) @@ -32,40 +34,48 @@ public void testBasic() { .endPositionEntry() .build(); // copy position by position - { - IntBlock.Builder dst = IntBlock.newBlockBuilder(randomIntBetween(1, 20)); + try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { for (int i = 0; i < src.getPositionCount(); i++) { - dst.appendAllValuesToCurrentPosition(src.filter(i)); + try (IntBlock filter = 
src.filter(i)) { + dst.appendAllValuesToCurrentPosition(filter); + } + } + try (IntBlock block = dst.build()) { + assertThat(block, equalTo(src)); } - assertThat(dst.build(), equalTo(src)); } // copy all block - { - IntBlock.Builder dst = IntBlock.newBlockBuilder(randomIntBetween(1, 20)); - IntBlock block = dst.appendAllValuesToCurrentPosition(src).build(); - assertThat(block.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); + try (IntBlock.Builder dst = blockFactory.newIntBlockBuilder(randomIntBetween(1, 20))) { + try (IntBlock block = dst.appendAllValuesToCurrentPosition(src).build()) { + assertThat(block.getPositionCount(), equalTo(1)); + assertThat(BlockUtils.toJavaObject(block, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); + } } - { - Block dst = randomlyDivideAndMerge(src); + try (Block dst = randomlyDivideAndMerge(src)) { assertThat(dst.getPositionCount(), equalTo(1)); assertThat(BlockUtils.toJavaObject(dst, 0), equalTo(List.of(1, 4, 6, 10, 20, 30, 1))); } } public void testRandomNullBlock() { - IntBlock.Builder src = IntBlock.newBlockBuilder(10); - src.appendAllValuesToCurrentPosition(new ConstantNullBlock(between(1, 100))); + BlockFactory blockFactory = blockFactory(); + IntBlock.Builder src = blockFactory.newIntBlockBuilder(10); + try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { + src.appendAllValuesToCurrentPosition(nullBlock); + } src.appendInt(101); - src.appendAllValuesToCurrentPosition(new ConstantNullBlock(between(1, 100))); + try (var nullBlock = blockFactory.newConstantNullBlock(between(1, 100))) { + src.appendAllValuesToCurrentPosition(nullBlock); + } IntBlock block = src.build(); assertThat(block.getPositionCount(), equalTo(3)); assertTrue(block.isNull(0)); assertThat(block.getInt(1), equalTo(101)); assertTrue(block.isNull(2)); - Block flatten = randomlyDivideAndMerge(block); - assertThat(flatten.getPositionCount(), equalTo(1)); - assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); + try (Block flatten = randomlyDivideAndMerge(block)) { + assertThat(flatten.getPositionCount(), equalTo(1)); + assertThat(BlockUtils.toJavaObject(flatten, 0), equalTo(101)); + } } public void testRandom() { @@ -79,14 +89,17 @@ public void testRandom() { 0, between(0, 16) ).block(); - randomlyDivideAndMerge(block); + + block = randomlyDivideAndMerge(block); + block.close(); } private Block randomlyDivideAndMerge(Block block) { while (block.getPositionCount() > 1 || randomBoolean()) { int positionCount = block.getPositionCount(); int offset = 0; - Block.Builder builder = block.elementType().newBlockBuilder(randomIntBetween(1, 100)); + Block.Builder builder = block.elementType() + .newBlockBuilder(randomIntBetween(1, 100), TestBlockFactory.getNonBreakingInstance()); List<Object> expected = new ArrayList<>(); while (offset < positionCount) { int length = randomIntBetween(1, positionCount - offset); @@ -98,7 +111,9 @@ private Block randomlyDivideAndMerge(Block block) { Block sub = block.filter(positions); expected.add(extractAndFlattenBlockValues(sub)); builder.appendAllValuesToCurrentPosition(sub); + sub.close(); } + block.close(); block = builder.build(); assertThat(block.getPositionCount(), equalTo(expected.size())); for (int i = 0; i < block.getPositionCount(); i++) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java index
529c1afeaaf44..e3a9aba0d1b7f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockBuilderCopyFromTests.java @@ -69,22 +69,24 @@ public void testEvensFiltered() { } public void testSmallAllNull() { - assertSmall(Block.constantNullBlock(10)); + assertSmall(TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(10)); } public void testEvensAllNull() { - assertEvens(Block.constantNullBlock(10)); + assertEvens(TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(10)); } private void assertSmall(Block block) { int smallSize = Math.min(block.getPositionCount(), 10); - Block.Builder builder = elementType.newBlockBuilder(smallSize); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + Block.Builder builder = elementType.newBlockBuilder(smallSize, blockFactory); builder.copyFrom(block, 0, smallSize); assertBlockValues(builder.build(), BasicBlockTests.valuesAtPositions(block, 0, smallSize)); } private void assertEvens(Block block) { - Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + Block.Builder builder = elementType.newBlockBuilder(block.getPositionCount() / 2, blockFactory); List<List<Object>> expected = new ArrayList<>(); for (int i = 0; i < block.getPositionCount(); i += 2) { builder.copyFrom(block, i, i + 1); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java index 96e5de20ba35c..3a8500ea90cb3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockFactoryTests.java @@ -568,7 +568,6 @@ public void testReleaseVector() { vector.close(); } assertTrue(vector.isReleased()); - assertTrue(vector.asBlock().isReleased()); assertThat(breaker.getUsed(), equalTo(0L)); } @@ -651,7 +650,12 @@ public void testAllowPassingBlockToDifferentContext() throws Exception { public void testOwningFactoryOfVectorBlock() { BlockFactory parentFactory = blockFactory(ByteSizeValue.ofBytes(between(1024, 4096))); LocalCircuitBreaker localBreaker = new LocalCircuitBreaker(parentFactory.breaker(), between(0, 1024), between(0, 1024)); - BlockFactory localFactory = new BlockFactory(localBreaker, bigArrays, parentFactory); + BlockFactory localFactory = new BlockFactory( + localBreaker, + bigArrays, + BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE, + parentFactory + ); int numValues = between(2, 10); try (var builder = localFactory.newIntVectorBuilder(numValues)) { for (int i = 0; i < numValues; i++) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java index e44697ab8534c..b13aa040f307d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BlockSerializationTests.java @@ -30,129 +30,179 @@ public class BlockSerializationTests extends SerializationTestCase { public void testConstantIntBlock() throws IOException { -
assertConstantBlockImpl(IntBlock.newConstantBlockWith(randomInt(), randomIntBetween(1, 8192))); + assertConstantBlockImpl(blockFactory.newConstantIntBlockWith(randomInt(), randomIntBetween(1, 8192))); } public void testConstantLongBlockLong() throws IOException { - assertConstantBlockImpl(LongBlock.newConstantBlockWith(randomLong(), randomIntBetween(1, 8192))); + assertConstantBlockImpl(blockFactory.newConstantLongBlockWith(randomLong(), randomIntBetween(1, 8192))); } public void testConstantDoubleBlock() throws IOException { - assertConstantBlockImpl(DoubleBlock.newConstantBlockWith(randomDouble(), randomIntBetween(1, 8192))); + assertConstantBlockImpl(blockFactory.newConstantDoubleBlockWith(randomDouble(), randomIntBetween(1, 8192))); } public void testConstantBytesRefBlock() throws IOException { - Block block = BytesRefBlock.newConstantBlockWith(new BytesRef(((Integer) randomInt()).toString()), randomIntBetween(1, 8192)); + Block block = blockFactory.newConstantBytesRefBlockWith( + new BytesRef(((Integer) randomInt()).toString()), + randomIntBetween(1, 8192) + ); assertConstantBlockImpl(block); } private void assertConstantBlockImpl(Block origBlock) throws IOException { assertThat(origBlock.asVector().isConstant(), is(true)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); assertThat(deserBlock.asVector().isConstant(), is(true)); } } public void testEmptyIntBlock() throws IOException { - assertEmptyBlock(IntBlock.newBlockBuilder(0).build()); - assertEmptyBlock(IntBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(IntVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(IntVector.newVectorBuilder(0).appendInt(randomInt()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newIntBlockBuilder(0).build()); + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newIntVectorBuilder(0).build().asBlock()); + try (IntVector toFilter = blockFactory.newIntVectorBuilder(0).appendInt(randomInt()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyLongBlock() throws IOException { - assertEmptyBlock(LongBlock.newBlockBuilder(0).build()); - assertEmptyBlock(LongBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(LongVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(LongVector.newVectorBuilder(0).appendLong(randomLong()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newLongBlockBuilder(0).build()); + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newLongVectorBuilder(0).build().asBlock()); + try (LongVector toFilter = blockFactory.newLongVectorBuilder(0).appendLong(randomLong()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyDoubleBlock() throws IOException { - assertEmptyBlock(DoubleBlock.newBlockBuilder(0).build()); - assertEmptyBlock(DoubleBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(DoubleVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(DoubleVector.newVectorBuilder(0).appendDouble(randomDouble()).build().filter().asBlock()); + 
assertEmptyBlock(blockFactory.newDoubleBlockBuilder(0).build()); + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newDoubleVectorBuilder(0).build().asBlock()); + try (DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(0).appendDouble(randomDouble()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } public void testEmptyBytesRefBlock() throws IOException { - assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).build()); - assertEmptyBlock(BytesRefBlock.newBlockBuilder(0).appendNull().build().filter()); - assertEmptyBlock(BytesRefVector.newVectorBuilder(0).build().asBlock()); - assertEmptyBlock(BytesRefVector.newVectorBuilder(0).appendBytesRef(randomBytesRef()).build().filter().asBlock()); + assertEmptyBlock(blockFactory.newBytesRefBlockBuilder(0).build()); + try (BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0).appendNull().build()) { + assertEmptyBlock(toFilter.filter()); + } + assertEmptyBlock(blockFactory.newBytesRefVectorBuilder(0).build().asBlock()); + try (BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0).appendBytesRef(randomBytesRef()).build()) { + assertEmptyBlock(toFilter.filter().asBlock()); + } } private void assertEmptyBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(0)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); } } public void testFilterIntBlock() throws IOException { - assertFilterBlock(IntBlock.newBlockBuilder(0).appendInt(1).appendInt(2).build().filter(1)); - assertFilterBlock(IntBlock.newBlockBuilder(1).appendInt(randomInt()).appendNull().build().filter(0)); - assertFilterBlock(IntVector.newVectorBuilder(1).appendInt(randomInt()).build().filter(0).asBlock()); - assertFilterBlock(IntVector.newVectorBuilder(1).appendInt(randomInt()).appendInt(randomInt()).build().filter(0).asBlock()); + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(0).appendInt(1).appendInt(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (IntBlock toFilter = blockFactory.newIntBlockBuilder(1).appendInt(randomInt()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } + try (IntVector toFilter = blockFactory.newIntVectorBuilder(1).appendInt(randomInt()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } + try (IntVector toFilter = blockFactory.newIntVectorBuilder(1).appendInt(randomInt()).appendInt(randomInt()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterLongBlock() throws IOException { - assertFilterBlock(LongBlock.newBlockBuilder(0).appendLong(1).appendLong(2).build().filter(1)); - assertFilterBlock(LongBlock.newBlockBuilder(1).appendLong(randomLong()).appendNull().build().filter(0)); - assertFilterBlock(LongVector.newVectorBuilder(1).appendLong(randomLong()).build().filter(0).asBlock()); - assertFilterBlock(LongVector.newVectorBuilder(1).appendLong(randomLong()).appendLong(randomLong()).build().filter(0).asBlock()); + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(0).appendLong(1).appendLong(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (LongBlock toFilter = blockFactory.newLongBlockBuilder(1).appendLong(randomLong()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } 
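// The same idiom condensed into one sketch, assuming only what these tests already define (the
// breaker-backed blockFactory, and assertFilterBlock, which serializes and then closes its argument):
try (LongBlock sketchSource = blockFactory.newLongBlockBuilder(2).appendLong(1).appendLong(2).build()) {
    assertFilterBlock(sketchSource.filter(1));
}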
+ try (LongVector toFilter = blockFactory.newLongVectorBuilder(1).appendLong(randomLong()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } + try (LongVector toFilter = blockFactory.newLongVectorBuilder(1).appendLong(randomLong()).appendLong(randomLong()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterDoubleBlock() throws IOException { - assertFilterBlock(DoubleBlock.newBlockBuilder(0).appendDouble(1).appendDouble(2).build().filter(1)); - assertFilterBlock(DoubleBlock.newBlockBuilder(1).appendDouble(randomDouble()).appendNull().build().filter(0)); - assertFilterBlock(DoubleVector.newVectorBuilder(1).appendDouble(randomDouble()).build().filter(0).asBlock()); - assertFilterBlock( - DoubleVector.newVectorBuilder(1).appendDouble(randomDouble()).appendDouble(randomDouble()).build().filter(0).asBlock() - ); + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(0).appendDouble(1).appendDouble(2).build()) { + assertFilterBlock(toFilter.filter(1)); + } + try (DoubleBlock toFilter = blockFactory.newDoubleBlockBuilder(1).appendDouble(randomDouble()).appendNull().build()) { + assertFilterBlock(toFilter.filter(0)); + } + try (DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(1).appendDouble(randomDouble()).build()) { + assertFilterBlock(toFilter.filter(0).asBlock()); + + } + try ( + DoubleVector toFilter = blockFactory.newDoubleVectorBuilder(1).appendDouble(randomDouble()).appendDouble(randomDouble()).build() + ) { + assertFilterBlock(toFilter.filter(0).asBlock()); + } } public void testFilterBytesRefBlock() throws IOException { - assertFilterBlock( - BytesRefBlock.newBlockBuilder(0) + try ( + BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0) .appendBytesRef(randomBytesRef()) .appendBytesRef(randomBytesRef()) .build() - .filter(randomIntBetween(0, 1)) - ); - assertFilterBlock( - BytesRefBlock.newBlockBuilder(0).appendBytesRef(randomBytesRef()).appendNull().build().filter(randomIntBetween(0, 1)) - ); - assertFilterBlock(BytesRefVector.newVectorBuilder(0).appendBytesRef(randomBytesRef()).build().asBlock().filter(0)); - assertFilterBlock( - BytesRefVector.newVectorBuilder(0) + ) { + assertFilterBlock(toFilter.filter(randomIntBetween(0, 1))); + } + + try (BytesRefBlock toFilter = blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(randomBytesRef()).appendNull().build()) { + assertFilterBlock(toFilter.filter(randomIntBetween(0, 1))); + } + + try (BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0).appendBytesRef(randomBytesRef()).build()) { + assertFilterBlock(toFilter.asBlock().filter(0)); + } + try ( + BytesRefVector toFilter = blockFactory.newBytesRefVectorBuilder(0) .appendBytesRef(randomBytesRef()) .appendBytesRef(randomBytesRef()) .build() - .asBlock() - .filter(randomIntBetween(0, 1)) - ); + ) { + assertFilterBlock(toFilter.asBlock().filter(randomIntBetween(0, 1))); + } } private void assertFilterBlock(Block origBlock) throws IOException { assertThat(origBlock.getPositionCount(), is(1)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + try (origBlock; Block deserBlock = serializeDeserializeBlock(origBlock)) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); assertThat(deserBlock.getPositionCount(), is(1)); } } public void testConstantNullBlock() throws IOException { - Block origBlock = new ConstantNullBlock(randomIntBetween(1, 8192)); - try (Block deserBlock = serializeDeserializeBlock(origBlock)) { - 
EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + try (Block origBlock = blockFactory.newConstantNullBlock(randomIntBetween(1, 8192))) { + try (Block deserBlock = serializeDeserializeBlock(origBlock)) { + EqualsHashCodeTestUtils.checkEqualsAndHashCode(origBlock, unused -> deserBlock); + } } } // TODO: more types, grouping, etc... public void testSimulateAggs() { DriverContext driverCtx = driverContext(); - Page page = new Page(new LongArrayVector(new long[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 10).asBlock()); + Page page = new Page(blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 }, 10).asBlock()); var bigArrays = BigArrays.NON_RECYCLING_INSTANCE; var params = new Object[] {}; var function = SumLongAggregatorFunction.create(driverCtx, List.of(0)); @@ -167,18 +217,20 @@ public void testSimulateAggs() { .forEach(i -> EqualsHashCodeTestUtils.checkEqualsAndHashCode(blocks[i], unused -> deserBlocks[i])); var inputChannels = IntStream.range(0, SumLongAggregatorFunction.intermediateStateDesc().size()).boxed().toList(); - var finalAggregator = SumLongAggregatorFunction.create(driverCtx, inputChannels); - finalAggregator.addIntermediateInput(new Page(deserBlocks)); - Block[] finalBlocks = new Block[1]; - finalAggregator.evaluateFinal(finalBlocks, 0, driverCtx); - try (var finalBlock = (LongBlock) finalBlocks[0]) { - assertThat(finalBlock.getLong(0), is(55L)); + try (var finalAggregator = SumLongAggregatorFunction.create(driverCtx, inputChannels)) { + finalAggregator.addIntermediateInput(new Page(deserBlocks)); + Block[] finalBlocks = new Block[1]; + finalAggregator.evaluateFinal(finalBlocks, 0, driverCtx); + try (var finalBlock = (LongBlock) finalBlocks[0]) { + assertThat(finalBlock.getLong(0), is(55L)); + } } } finally { Releasables.close(deserBlocks); } } finally { Releasables.close(blocks); + page.releaseBlocks(); } } @@ -201,6 +253,6 @@ protected final BigArrays nonBreakingBigArrays() { * A {@link DriverContext} with a nonBreakingBigArrays. 
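* Because it wires in the non-breaking block factory, allocations made through this context are not
* charged to a circuit breaker; the TODO below tracks switching to a breaking factory.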
*/ protected DriverContext driverContext() { // TODO make this final and return a breaking block factory - return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); + return new DriverContext(nonBreakingBigArrays(), TestBlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java index ae0d56d8612ce..c0fc539cecc6c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BooleanBlockEqualityTests.java @@ -14,15 +14,17 @@ public class BooleanBlockEqualityTests extends ESTestCase { + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + public void testEmptyVector() { // all these "empty" vectors should be equivalent List<BooleanVector> vectors = List.of( - new BooleanArrayVector(new boolean[] {}, 0), - new BooleanArrayVector(new boolean[] { randomBoolean() }, 0), - BooleanBlock.newConstantBlockWith(randomBoolean(), 0).asVector(), - BooleanBlock.newConstantBlockWith(randomBoolean(), 0).filter().asVector(), - BooleanBlock.newBlockBuilder(0).build().asVector(), - BooleanBlock.newBlockBuilder(0).appendBoolean(randomBoolean()).build().asVector().filter() + blockFactory.newBooleanArrayVector(new boolean[] {}, 0), + blockFactory.newBooleanArrayVector(new boolean[] { randomBoolean() }, 0), + blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0).asVector(), + blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0).filter().asVector(), + blockFactory.newBooleanBlockBuilder(0).build().asVector(), + blockFactory.newBooleanBlockBuilder(0).appendBoolean(randomBoolean()).build().asVector().filter() ); assertAllEquals(vectors); } @@ -35,19 +37,21 @@ public void testEmptyBlock() { 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BooleanArrayBlock( new boolean[] { randomBoolean() }, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - BooleanBlock.newConstantBlockWith(randomBoolean(), 0), - BooleanBlock.newBlockBuilder(0).build(), - BooleanBlock.newBlockBuilder(0).appendBoolean(randomBoolean()).build().filter(), - BooleanBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newConstantBooleanBlockWith(randomBoolean(), 0), + blockFactory.newBooleanBlockBuilder(0).build(), + blockFactory.newBooleanBlockBuilder(0).appendBoolean(randomBoolean()).build().filter(), + blockFactory.newBooleanBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); } @@ -55,16 +59,16 @@ public void testEmptyBlock() { public void testVectorEquality() { // all these vectors should be equivalent List<BooleanVector> vectors = List.of( - new BooleanArrayVector(new boolean[] { true, false, true }, 3), - new BooleanArrayVector(new boolean[] { true, false, true }, 3).asBlock().asVector(), - new BooleanArrayVector(new boolean[] { true, false, true, false }, 3), - new BooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 1, 2), - new BooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2), - new BooleanArrayVector(new boolean[] { false, true, false, true }, 4).filter(1, 2, 3), - new
BooleanArrayVector(new boolean[] { true, true, false, true }, 4).filter(0, 2, 3), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().asVector().filter(0, 1, 2), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).asBlock().asVector(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 1, 2), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2), + blockFactory.newBooleanArrayVector(new boolean[] { false, true, false, true }, 4).filter(1, 2, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, false, true }, 4).filter(0, 2, 3), + blockFactory.newBooleanVectorBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(), + blockFactory.newBooleanVectorBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2), + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(true) .appendBoolean(false) @@ -72,7 +76,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(true) .appendBoolean(false) @@ -85,17 +89,23 @@ public void testVectorEquality() { // all these constant-like vectors should be equivalent List<BooleanVector> moreVectors = List.of( - new BooleanArrayVector(new boolean[] { true, true, true }, 3), - new BooleanArrayVector(new boolean[] { true, true, true }, 3).asBlock().asVector(), - new BooleanArrayVector(new boolean[] { true, true, true, true }, 3), - new BooleanArrayVector(new boolean[] { true, true, true }, 3).filter(0, 1, 2), - new BooleanArrayVector(new boolean[] { true, true, true, false }, 4).filter(0, 1, 2), - new BooleanArrayVector(new boolean[] { false, true, true, true }, 4).filter(1, 2, 3), - new BooleanArrayVector(new boolean[] { true, false, true, true }, 4).filter(0, 2, 3), - BooleanBlock.newConstantBlockWith(true, 3).asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector().filter(0, 1, 2), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanArrayVector(new boolean[] { true, true, true }, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, true }, 3).asBlock().asVector(), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, true, true }, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, true }, 3).filter(0, 1, 2), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, true, false }, 4).filter(0, 1, 2), + blockFactory.newBooleanArrayVector(new boolean[] { false, true, true, true }, 4).filter(1, 2, 3), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, true }, 4).filter(0, 2, 3), + blockFactory.newConstantBooleanBlockWith(true, 3).asVector(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(3) + .appendBoolean(true) +
.appendBoolean(true) + .appendBoolean(true) + .build() + .asVector() + .filter(0, 1, 2), + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) @@ -103,7 +113,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) @@ -118,35 +128,37 @@ public void testVectorEquality() { public void testBlockEquality() { // all these blocks should be equivalent List<BooleanBlock> blocks = List.of( - new BooleanArrayVector(new boolean[] { true, false, true }, 3).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).asBlock(), new BooleanArrayBlock( new boolean[] { true, false, true }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BooleanArrayBlock( new boolean[] { true, false, true, false }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new BooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 1, 2).asBlock(), - new BooleanArrayVector(new boolean[] { true, false, true, false }, 3).filter(0, 1, 2).asBlock(), - new BooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2).asBlock(), - new BooleanArrayVector(new boolean[] { true, false, false, true }, 4).filter(0, 1, 3).asBlock(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true, false }, 4).filter(0, 1, 2).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, false, true }, 4).filter(0, 1, 3).asBlock(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(true).build().filter(0, 1, 2), + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendBoolean(true) .appendBoolean(false) .appendBoolean(true) .build() .filter(0, 2, 3), - BooleanBlock.newBlockBuilder(3) + blockFactory.newBooleanBlockBuilder(3) .appendBoolean(true) .appendNull() .appendBoolean(false) @@ -158,30 +170,32 @@ public void testBlockEquality() { // all these constant-like blocks should be equivalent List<BooleanBlock> moreBlocks = List.of( - new BooleanArrayVector(new boolean[] { true, true }, 2).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, true }, 2).asBlock(), new BooleanArrayBlock( new boolean[] { true, true }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BooleanArrayBlock( new boolean[] { true, true, false }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new
BooleanArrayVector(new boolean[] { true, true }, 2).filter(0, 1).asBlock(), - new BooleanArrayVector(new boolean[] { true, true, false }, 2).filter(0, 1).asBlock(), - new BooleanArrayVector(new boolean[] { true, true, false }, 3).filter(0, 1).asBlock(), - new BooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 2).asBlock(), - BooleanBlock.newConstantBlockWith(true, 2), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).build(), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).build().filter(0, 1), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().filter(0, 2), - BooleanBlock.newBlockBuilder(2).appendBoolean(true).appendNull().appendBoolean(true).build().filter(0, 2) + blockFactory.newBooleanArrayVector(new boolean[] { true, true }, 2).filter(0, 1).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, false }, 2).filter(0, 1).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, true, false }, 3).filter(0, 1).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).filter(0, 2).asBlock(), + blockFactory.newConstantBooleanBlockWith(true, 2), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).build(), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).build().filter(0, 1), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendBoolean(true).appendBoolean(true).build().filter(0, 2), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(true).appendNull().appendBoolean(true).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -189,15 +203,15 @@ public void testBlockEquality() { public void testVectorInequality() { // all these vectors should NOT be equivalent List<BooleanVector> notEqualVectors = List.of( - new BooleanArrayVector(new boolean[] { true }, 1), - new BooleanArrayVector(new boolean[] { false }, 1), - new BooleanArrayVector(new boolean[] { true, false }, 2), - new BooleanArrayVector(new boolean[] { true, false, true }, 3), - new BooleanArrayVector(new boolean[] { false, true, false }, 3), - BooleanBlock.newConstantBlockWith(true, 2).asVector(), - BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(3).appendBoolean(false).appendBoolean(false).appendBoolean(true).build().asVector(), - BooleanBlock.newBlockBuilder(1) + blockFactory.newBooleanArrayVector(new boolean[] { true }, 1), + blockFactory.newBooleanArrayVector(new boolean[] { false }, 1), + blockFactory.newBooleanArrayVector(new boolean[] { true, false }, 2), + blockFactory.newBooleanArrayVector(new boolean[] { true, false, true }, 3), + blockFactory.newBooleanArrayVector(new boolean[] { false, true, false }, 3), + blockFactory.newConstantBooleanBlockWith(true, 2).asVector(), + blockFactory.newBooleanBlockBuilder(2).appendBoolean(false).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(false).appendBoolean(false).appendBoolean(true).build().asVector(), + blockFactory.newBooleanBlockBuilder(1) .appendBoolean(false) .appendBoolean(false) .appendBoolean(false) @@ -211,18 +225,28 @@ public void testVectorInequality() { public void testBlockInequality() { // all these blocks should NOT be equivalent List<BooleanBlock> notEqualBlocks = List.of( - new BooleanArrayVector(new boolean[] { false }, 1).asBlock(), - new BooleanArrayVector(new boolean[] { true }, 1).asBlock(), - new
BooleanArrayVector(new boolean[] { false, true }, 2).asBlock(), - new BooleanArrayVector(new boolean[] { false, true, false }, 3).asBlock(), - new BooleanArrayVector(new boolean[] { false, false, true }, 3).asBlock(), - BooleanBlock.newConstantBlockWith(true, 2), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendBoolean(false).appendBoolean(true).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendNull().build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendNull().appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(1).appendBoolean(true).appendBoolean(false).build(), - BooleanBlock.newBlockBuilder(3).appendBoolean(true).beginPositionEntry().appendBoolean(false).appendBoolean(false).build() + blockFactory.newBooleanArrayVector(new boolean[] { false }, 1).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { true }, 1).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { false, true }, 2).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { false, true, false }, 3).asBlock(), + blockFactory.newBooleanArrayVector(new boolean[] { false, false, true }, 3).asBlock(), + blockFactory.newConstantBooleanBlockWith(true, 2), + blockFactory.newBooleanBlockBuilder(3).appendBoolean(true).appendBoolean(false).appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(1) + .appendBoolean(true) + .appendBoolean(false) + .appendBoolean(true) + .appendBoolean(false) + .build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendNull().build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendNull().appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(1).appendBoolean(true).appendBoolean(false).build(), + blockFactory.newBooleanBlockBuilder(3) + .appendBoolean(true) + .beginPositionEntry() + .appendBoolean(false) + .appendBoolean(false) + .build() ); assertAllNotEquals(notEqualBlocks); }
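Every hunk in the file above applies one mechanical substitution: static constructors and builders (new BooleanArrayVector(...), BooleanBlock.newBlockBuilder(...), BooleanBlock.newConstantBlockWith(...)) are rerouted through a BlockFactory instance so each allocation is attributed to the factory and its circuit breaker, and the raw array-block constructors take the factory as a trailing argument. A minimal sketch of the resulting idiom, using only calls that appear in these hunks and the same non-breaking test factory they rely on:

    BlockFactory factory = TestBlockFactory.getNonBreakingInstance();
    BooleanBlock built = factory.newBooleanBlockBuilder(3)
        .appendBoolean(true).appendBoolean(false).appendBoolean(true)
        .build();
    BooleanBlock viewed = factory.newBooleanArrayVector(new boolean[] { true, false, true }, 3).asBlock();
    assertEquals(built, viewed);                       // equality is defined by contents, not representation
    assertEquals(built.hashCode(), viewed.hashCode());
    Releasables.close(built, viewed);                  // factory-made blocks are Releasable

This is exactly the property that assertAllEquals exercises across the much longer lists above.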
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java index ee654497c1ec3..ec740db329c74 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/BytesRefBlockEqualityTests.java @@ -12,27 +12,28 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; import java.util.Arrays; import java.util.BitSet; import java.util.List; -public class BytesRefBlockEqualityTests extends ESTestCase { +public class BytesRefBlockEqualityTests extends ComputeTestCase { final BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()); + final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent try (var bytesRefArray1 = new BytesRefArray(0, bigArrays); var bytesRefArray2 = new BytesRefArray(1, bigArrays)) { List<BytesRefVector> vectors = List.of( - new BytesRefArrayVector(bytesRefArray1, 0), - new BytesRefArrayVector(bytesRefArray2, 0), - BytesRefBlock.newConstantBlockWith(new BytesRef(), 0).asVector(), - BytesRefBlock.newConstantBlockWith(new BytesRef(), 0).filter().asVector(), - BytesRefBlock.newBlockBuilder(0).build().asVector(), - BytesRefBlock.newBlockBuilder(0).appendBytesRef(new BytesRef()).build().asVector().filter() + new BytesRefArrayVector(bytesRefArray1, 0, blockFactory), + new BytesRefArrayVector(bytesRefArray2, 0, blockFactory), + blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0).asVector(), + blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0).filter().asVector(), + blockFactory.newBytesRefBlockBuilder(0).build().asVector(), + blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(new BytesRef()).build().asVector().filter() ); assertAllEquals(vectors); } @@ -47,19 +48,21 @@ public void testEmptyBlock() { 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BytesRefArrayBlock( bytesRefArray2, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - BytesRefBlock.newConstantBlockWith(new BytesRef(), 0), - BytesRefBlock.newBlockBuilder(0).build(), - BytesRefBlock.newBlockBuilder(0).appendBytesRef(new BytesRef()).build().filter(), - BytesRefBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newConstantBytesRefBlockWith(new BytesRef(), 0), + blockFactory.newBytesRefBlockBuilder(0).build(), + blockFactory.newBytesRefBlockBuilder(0).appendBytesRef(new BytesRef()).build().filter(), + blockFactory.newBytesRefBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); } @@ -69,25 +72,25 @@ public void testVectorEquality() { // all these vectors should be equivalent try (var bytesRefArray1 = arrayOf("1", "2", "3"); var bytesRefArray2 = arrayOf("1", "2", "3", "4")) { List<BytesRefVector> vectors = List.of( - new BytesRefArrayVector(bytesRefArray1, 3), - new BytesRefArrayVector(bytesRefArray1, 3).asBlock().asVector(), - new BytesRefArrayVector(bytesRefArray2, 3), - new BytesRefArrayVector(bytesRefArray1, 3).filter(0, 1, 2), - new BytesRefArrayVector(bytesRefArray2, 4).filter(0, 1, 2), - BytesRefBlock.newBlockBuilder(3) + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory), + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).asBlock().asVector(), + new BytesRefArrayVector(bytesRefArray2, 3, blockFactory), + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2), + new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2), + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .asVector() .filter(0, 1, 2), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("2")) @@ -95,7 +98,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("2"))
@@ -110,26 +113,26 @@ public void testVectorEquality() { // all these constant-like vectors should be equivalent try (var bytesRefArray1 = arrayOf("1", "1", "1"); var bytesRefArray2 = arrayOf("1", "1", "1", "4")) { List<BytesRefVector> moreVectors = List.of( - new BytesRefArrayVector(bytesRefArray1, 3), - new BytesRefArrayVector(bytesRefArray1, 3).asBlock().asVector(), - new BytesRefArrayVector(bytesRefArray2, 3), - new BytesRefArrayVector(bytesRefArray1, 3).filter(0, 1, 2), - new BytesRefArrayVector(bytesRefArray2, 4).filter(0, 1, 2), - BytesRefBlock.newConstantBlockWith(new BytesRef("1"), 3).asVector(), - BytesRefBlock.newBlockBuilder(3) + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory), + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).asBlock().asVector(), + new BytesRefArrayVector(bytesRefArray2, 3, blockFactory), + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2), + new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2), + blockFactory.newConstantBytesRefBlockWith(new BytesRef("1"), 3).asVector(), + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .build() .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("1")) .build() .asVector() .filter(0, 1, 2), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("1")) @@ -137,7 +140,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("1")) @@ -154,43 +157,45 @@ public void testBlockEquality() { // all these blocks should be equivalent try (var bytesRefArray1 = arrayOf("1", "2", "3"); var bytesRefArray2 = arrayOf("1", "2", "3", "4")) { List<BytesRefBlock> blocks = List.of( - new BytesRefArrayVector(bytesRefArray1, 3).asBlock(), + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).asBlock(), new BytesRefArrayBlock( bytesRefArray1, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BytesRefArrayBlock( bytesRefArray2, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new BytesRefArrayVector(bytesRefArray1, 3).filter(0, 1, 2).asBlock(), - new BytesRefArrayVector(bytesRefArray2, 3).filter(0, 1, 2).asBlock(), - new BytesRefArrayVector(bytesRefArray2, 4).filter(0, 1, 2).asBlock(), - BytesRefBlock.newBlockBuilder(3) + new BytesRefArrayVector(bytesRefArray1, 3, blockFactory).filter(0, 1, 2).asBlock(), + new BytesRefArrayVector(bytesRefArray2, 3, blockFactory).filter(0, 1, 2).asBlock(), + new BytesRefArrayVector(bytesRefArray2, 4, blockFactory).filter(0, 1, 2).asBlock(), + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build(), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .filter(0, 1, 2), -
BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .build() .filter(0, 2, 3), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendNull() .appendBytesRef(new BytesRef("2")) @@ -204,34 +209,40 @@ public void testBlockEquality() { // all these constant-like blocks should be equivalent try (var bytesRefArray1 = arrayOf("9", "9"); var bytesRefArray2 = arrayOf("9", "9", "4")) { List<BytesRefBlock> moreBlocks = List.of( - new BytesRefArrayVector(bytesRefArray1, 2).asBlock(), + new BytesRefArrayVector(bytesRefArray1, 2, blockFactory).asBlock(), new BytesRefArrayBlock( bytesRefArray1, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new BytesRefArrayBlock( bytesRefArray2, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new BytesRefArrayVector(bytesRefArray1, 2).filter(0, 1).asBlock(), - new BytesRefArrayVector(bytesRefArray2, 2).filter(0, 1).asBlock(), - new BytesRefArrayVector(bytesRefArray2, 3).filter(0, 1).asBlock(), - BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build(), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build().filter(0, 1), - BytesRefBlock.newBlockBuilder(2) + new BytesRefArrayVector(bytesRefArray1, 2, blockFactory).filter(0, 1).asBlock(), + new BytesRefArrayVector(bytesRefArray2, 2, blockFactory).filter(0, 1).asBlock(), + new BytesRefArrayVector(bytesRefArray2, 3, blockFactory).filter(0, 1).asBlock(), + blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2), + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("9")).appendBytesRef(new BytesRef("9")).build(), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("9")) + .appendBytesRef(new BytesRef("9")) + .build() + .filter(0, 1), + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("9")) .appendBytesRef(new BytesRef("4")) .appendBytesRef(new BytesRef("9")) .build() .filter(0, 2), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("9")) .appendNull() .appendBytesRef(new BytesRef("9")) @@ -252,25 +263,25 @@ public void testVectorInequality() { var bytesRefArray5 = arrayOf("1", "2", "4") ) { List<BytesRefVector> notEqualVectors = List.of( - new BytesRefArrayVector(bytesRefArray1, 1), - new BytesRefArrayVector(bytesRefArray2, 1), - new BytesRefArrayVector(bytesRefArray3, 2), - new BytesRefArrayVector(bytesRefArray4, 3), - new BytesRefArrayVector(bytesRefArray5, 3), - BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2).asVector(), - BytesRefBlock.newBlockBuilder(2) + new BytesRefArrayVector(bytesRefArray1, 1, blockFactory), + new BytesRefArrayVector(bytesRefArray2, 1, blockFactory), + new BytesRefArrayVector(bytesRefArray3, 2, blockFactory), + new BytesRefArrayVector(bytesRefArray4, 3, blockFactory), + new BytesRefArrayVector(bytesRefArray5, 3, blockFactory), + blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2).asVector(), + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("1"))
.appendBytesRef(new BytesRef("2")) .build() .asVector() .filter(1), - BytesRefBlock.newBlockBuilder(3) + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("5")) .build() .asVector(), - BytesRefBlock.newBlockBuilder(1) + blockFactory.newBytesRefBlockBuilder(1) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) @@ -292,27 +303,35 @@ public void testBlockInequality() { var bytesRefArray5 = arrayOf("1", "2", "4") ) { List notEqualBlocks = List.of( - new BytesRefArrayVector(bytesRefArray1, 1).asBlock(), - new BytesRefArrayVector(bytesRefArray2, 1).asBlock(), - new BytesRefArrayVector(bytesRefArray3, 2).asBlock(), - new BytesRefArrayVector(bytesRefArray4, 3).asBlock(), - new BytesRefArrayVector(bytesRefArray5, 3).asBlock(), - BytesRefBlock.newConstantBlockWith(new BytesRef("9"), 2), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("2")).build().filter(1), - BytesRefBlock.newBlockBuilder(3) + new BytesRefArrayVector(bytesRefArray1, 1, blockFactory).asBlock(), + new BytesRefArrayVector(bytesRefArray2, 1, blockFactory).asBlock(), + new BytesRefArrayVector(bytesRefArray3, 2, blockFactory).asBlock(), + new BytesRefArrayVector(bytesRefArray4, 3, blockFactory).asBlock(), + new BytesRefArrayVector(bytesRefArray5, 3, blockFactory).asBlock(), + blockFactory.newConstantBytesRefBlockWith(new BytesRef("9"), 2), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("1")) + .appendBytesRef(new BytesRef("2")) + .build() + .filter(1), + blockFactory.newBytesRefBlockBuilder(3) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("5")) .build(), - BytesRefBlock.newBlockBuilder(1) + blockFactory.newBytesRefBlockBuilder(1) .appendBytesRef(new BytesRef("1")) .appendBytesRef(new BytesRef("2")) .appendBytesRef(new BytesRef("3")) .appendBytesRef(new BytesRef("4")) .build(), - BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().build(), - BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build(), - BytesRefBlock.newBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("3")).build() + blockFactory.newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendNull().build(), + blockFactory.newBytesRefBlockBuilder(1) + .appendBytesRef(new BytesRef("1")) + .appendNull() + .appendBytesRef(new BytesRef("3")) + .build(), + blockFactory.newBytesRefBlockBuilder(1).appendBytesRef(new BytesRef("1")).appendBytesRef(new BytesRef("3")).build() ); assertAllNotEquals(notEqualBlocks); } @@ -320,8 +339,12 @@ public void testBlockInequality() { public void testSimpleBlockWithSingleNull() { List blocks = List.of( - BytesRefBlock.newBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build(), - BytesRefBlock.newBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build() + blockFactory.newBytesRefBlockBuilder(3) + .appendBytesRef(new BytesRef("1")) + .appendNull() + .appendBytesRef(new BytesRef("3")) + .build(), + blockFactory.newBytesRefBlockBuilder(3).appendBytesRef(new BytesRef("1")).appendNull().appendBytesRef(new BytesRef("3")).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -331,8 +354,8 @@ public void 
testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); - BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -349,12 +372,12 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List<BytesRefBlock> blocks = List.of( - BytesRefBlock.newBlockBuilder(1) + blockFactory.newBytesRefBlockBuilder(1) .beginPositionEntry() .appendBytesRef(new BytesRef("1a")) .appendBytesRef(new BytesRef("2b")) .build(), - BytesRefBlock.newBlockBuilder(1) + blockFactory.newBytesRefBlockBuilder(1) .beginPositionEntry() .appendBytesRef(new BytesRef("1a")) .appendBytesRef(new BytesRef("2b")) @@ -368,9 +391,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - BytesRefBlock.Builder builder1 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); - BytesRefBlock.Builder builder2 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); - BytesRefBlock.Builder builder3 = BytesRefBlock.newBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder1 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder2 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); + BytesRefBlock.Builder builder3 = blockFactory.newBytesRefBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry();
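The BytesRef flavor differs from the boolean one in a single respect: values live in a BigArrays-backed BytesRefArray, so the array-vector constructor stays direct and simply gains the factory as an extra argument (new BytesRefArrayVector(array, count, blockFactory)), while the backing arrays remain wrapped in try-with-resources. The null-handling shape these tests repeat, sketched with builder calls taken from the hunks above:

    BlockFactory factory = TestBlockFactory.getNonBreakingInstance();
    try (BytesRefBlock block = factory.newBytesRefBlockBuilder(3)
        .appendBytesRef(new BytesRef("1"))
        .appendNull()
        .appendBytesRef(new BytesRef("3"))
        .build()) {
        assertEquals(3, block.getPositionCount());
        assertTrue(block.isNull(1)); // the middle position was appended as null
    }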
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java index e2eff15fcb769..1c14683e178b8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DocVectorTests.java @@ -8,9 +8,10 @@ package org.elasticsearch.compute.data; import org.elasticsearch.common.Randomness; +import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.compute.operator.ComputeTestCase; import org.elasticsearch.core.Releasables; -import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; import java.util.Collections; @@ -21,7 +22,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -public class DocVectorTests extends ESTestCase { +public class DocVectorTests extends ComputeTestCase { public void testNonDecreasingSetTrue() { int length = between(1, 100); DocVector docs = new DocVector(intRange(0, length), intRange(0, length), intRange(0, length), true); @@ -29,28 +30,36 @@ public void testNonDecreasingSetTrue() { } public void testNonDecreasingSetFalse() { - DocVector docs = new DocVector(intRange(0, 2), intRange(0, 2), new IntArrayVector(new int[] { 1, 0 }, 2), false); + BlockFactory blockFactory = blockFactory(); + DocVector docs = new DocVector(intRange(0, 2), intRange(0, 2), blockFactory.newIntArrayVector(new int[] { 1, 0 }, 2), false); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testNonDecreasingNonConstantShard() { - DocVector docs = new DocVector(intRange(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), null); + BlockFactory blockFactory = blockFactory(); + DocVector docs = new DocVector(intRange(0, 2), blockFactory.newConstantIntVector(0, 2), intRange(0, 2), null); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testNonDecreasingNonConstantSegment() { - DocVector docs = new DocVector(IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), intRange(0, 2), null); + BlockFactory blockFactory = blockFactory(); + DocVector docs = new DocVector(blockFactory.newConstantIntVector(0, 2), intRange(0, 2), intRange(0, 2), null); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testNonDecreasingDescendingDocs() { + BlockFactory blockFactory = blockFactory(); DocVector docs = new DocVector( - IntBlock.newConstantBlockWith(0, 2).asVector(), - IntBlock.newConstantBlockWith(0, 2).asVector(), - new IntArrayVector(new int[] { 1, 0 }, 2), + blockFactory.newConstantIntVector(0, 2), + blockFactory.newConstantIntVector(0, 2), + blockFactory.newIntArrayVector(new int[] { 1, 0 }, 2), null ); assertFalse(docs.singleSegmentNonDecreasing()); + docs.close(); } public void testShardSegmentDocMap() { @@ -100,7 +109,7 @@ public void testRandomShardSegmentDocMap() { private void assertShardSegmentDocMap(int[][] data, int[][] expected) { BlockFactory blockFactory = BlockFactoryTests.blockFactory(ByteSizeValue.ofGb(1)); - try (DocBlock.Builder builder = DocBlock.newBlockBuilder(data.length, blockFactory)) { + try (DocBlock.Builder builder = DocBlock.newBlockBuilder(blockFactory, data.length)) { for (int r = 0; r < data.length; r++) {
0; r < data.length; r++) { builder.appendShard(data[r][0]); builder.appendSegment(data[r][1]); @@ -133,7 +142,8 @@ private void assertShardSegmentDocMap(int[][] data, int[][] expected) { } public void testCannotDoubleRelease() { - var block = new DocVector(intRange(0, 2), IntBlock.newConstantBlockWith(0, 2).asVector(), intRange(0, 2), null).asBlock(); + BlockFactory blockFactory = blockFactory(); + var block = new DocVector(intRange(0, 2), blockFactory.newConstantIntBlockWith(0, 2).asVector(), intRange(0, 2), null).asBlock(); assertThat(block.isReleased(), is(false)); Page page = new Page(block); @@ -141,7 +151,7 @@ public void testCannotDoubleRelease() { assertThat(block.isReleased(), is(true)); Exception e = expectThrows(IllegalStateException.class, () -> block.close()); - assertThat(e.getMessage(), containsString("can't release already released block")); + assertThat(e.getMessage(), containsString("can't release already released object")); e = expectThrows(IllegalStateException.class, () -> page.getBlock(0)); assertThat(e.getMessage(), containsString("can't read released block")); @@ -151,17 +161,55 @@ public void testCannotDoubleRelease() { } public void testRamBytesUsedWithout() { + BlockFactory blockFactory = blockFactory(); DocVector docs = new DocVector( - IntBlock.newConstantBlockWith(0, 1).asVector(), - IntBlock.newConstantBlockWith(0, 1).asVector(), - IntBlock.newConstantBlockWith(0, 1).asVector(), + blockFactory.newConstantIntBlockWith(0, 1).asVector(), + blockFactory.newConstantIntBlockWith(0, 1).asVector(), + blockFactory.newConstantIntBlockWith(0, 1).asVector(), false ); assertThat(docs.singleSegmentNonDecreasing(), is(false)); docs.ramBytesUsed(); // ensure non-singleSegmentNonDecreasing handles nulls in ramByteUsed + docs.close(); + } + + public void testFilter() { + BlockFactory factory = blockFactory(); + try ( + DocVector docs = new DocVector( + factory.newConstantIntVector(0, 10), + factory.newConstantIntVector(0, 10), + factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10), + false + ); + DocVector filtered = docs.filter(1, 2, 3); + DocVector expected = new DocVector( + factory.newConstantIntVector(0, 3), + factory.newConstantIntVector(0, 3), + factory.newIntArrayVector(new int[] { 1, 2, 3 }, 3), + false + ); + ) { + assertThat(filtered, equalTo(expected)); + } + } + + public void testFilterBreaks() { + BlockFactory factory = blockFactory(ByteSizeValue.ofBytes(between(160, 280))); + try ( + DocVector docs = new DocVector( + factory.newConstantIntVector(0, 10), + factory.newConstantIntVector(0, 10), + factory.newIntArrayVector(new int[] { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9 }, 10), + false + ) + ) { + Exception e = expectThrows(CircuitBreakingException.class, () -> docs.filter(1, 2, 3)); + assertThat(e.getMessage(), equalTo("over test limit")); + } } IntVector intRange(int startInclusive, int endExclusive) { - return IntVector.range(startInclusive, endExclusive, BlockFactory.getNonBreakingInstance()); + return IntVector.range(startInclusive, endExclusive, TestBlockFactory.getNonBreakingInstance()); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 7dda97f52834e..e8f8fbcbf1c4c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ 
diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java index 7dda97f52834e..e8f8fbcbf1c4c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/DoubleBlockEqualityTests.java @@ -7,22 +7,25 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; +import org.elasticsearch.core.Releasables; import java.util.BitSet; import java.util.List; -public class DoubleBlockEqualityTests extends ESTestCase { +public class DoubleBlockEqualityTests extends ComputeTestCase { + + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent List<DoubleVector> vectors = List.of( - new DoubleArrayVector(new double[] {}, 0), - new DoubleArrayVector(new double[] { 0 }, 0), - DoubleBlock.newConstantBlockWith(0, 0).asVector(), - DoubleBlock.newConstantBlockWith(0, 0).filter().asVector(), - DoubleBlock.newBlockBuilder(0).build().asVector(), - DoubleBlock.newBlockBuilder(0).appendDouble(1).build().asVector().filter() + blockFactory.newDoubleArrayVector(new double[] {}, 0), + blockFactory.newDoubleArrayVector(new double[] { 0 }, 0), + blockFactory.newConstantDoubleVector(0, 0), + blockFactory.newConstantDoubleBlockWith(0, 0).filter().asVector(), + blockFactory.newDoubleBlockBuilder(0).build().asVector(), + blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().asVector().filter() ); assertAllEquals(vectors); } @@ -30,41 +33,42 @@ public void testEmptyVector() { public void testEmptyBlock() { // all these "empty" vectors should be equivalent List<DoubleBlock> blocks = List.of( - new DoubleArrayBlock( + blockFactory.newDoubleArrayBlock( new double[] {}, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), - new DoubleArrayBlock( + blockFactory.newDoubleArrayBlock( new double[] { 0 }, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), - DoubleBlock.newConstantBlockWith(0, 0), - DoubleBlock.newBlockBuilder(0).build(), - DoubleBlock.newBlockBuilder(0).appendDouble(1).build().filter(), - DoubleBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newConstantDoubleBlockWith(0, 0), + blockFactory.newDoubleBlockBuilder(0).build(), + blockFactory.newDoubleBlockBuilder(0).appendDouble(1).build().filter(), + blockFactory.newDoubleBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); + Releasables.close(blocks); } public void testVectorEquality() { // all these vectors should be equivalent List<DoubleVector> vectors = List.of( - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3), - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock().asVector(), - new DoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3), - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3).filter(0, 1, 2), - new DoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), - new DoubleArrayVector(new double[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), - new DoubleArrayVector(new double[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector().filter(0, 1, 2), - DoubleBlock.newBlockBuilder(3) + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock().asVector(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 },
3).filter(0, 1, 2), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), + blockFactory.newDoubleArrayVector(new double[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().asVector().filter(0, 1, 2), + blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(2) @@ -72,7 +76,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - DoubleBlock.newBlockBuilder(3) + blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(2) @@ -85,17 +89,17 @@ public void testVectorEquality() { // all these constant-like vectors should be equivalent List<DoubleVector> moreVectors = List.of( - new DoubleArrayVector(new double[] { 1, 1, 1 }, 3), - new DoubleArrayVector(new double[] { 1, 1, 1 }, 3).asBlock().asVector(), - new DoubleArrayVector(new double[] { 1, 1, 1, 1 }, 3), - new DoubleArrayVector(new double[] { 1, 1, 1 }, 3).filter(0, 1, 2), - new DoubleArrayVector(new double[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), - new DoubleArrayVector(new double[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), - new DoubleArrayVector(new double[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), - DoubleBlock.newConstantBlockWith(1, 3).asVector(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector().filter(0, 1, 2), - DoubleBlock.newBlockBuilder(3) + blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3).asBlock().asVector(), + blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1, 1 }, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1 }, 3).filter(0, 1, 2), + blockFactory.newDoubleArrayVector(new double[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), + blockFactory.newDoubleArrayVector(new double[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), + blockFactory.newConstantDoubleBlockWith(1, 3).asVector(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(1).appendDouble(1).build().asVector().filter(0, 1, 2), + blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(1) @@ -103,7 +107,7 @@ public void testVectorEquality() { .build() .filter(0, 2, 3) .asVector(), - DoubleBlock.newBlockBuilder(3) + blockFactory.newDoubleBlockBuilder(3) .appendDouble(1) .appendDouble(4) .appendDouble(1) @@ -118,58 +122,62 @@ public void testVectorEquality() { public void testBlockEquality() { // all these blocks should be equivalent List<DoubleBlock> blocks = List.of( - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), new DoubleArrayBlock( new double[] { 1, 2, 3 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new DoubleArrayBlock( new double[] { 1, 2, 3, 4 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), -
randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), - new DoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), - new DoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), - new DoubleArrayVector(new double[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().filter(0, 1, 2), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(4).appendDouble(2).appendDouble(3).build().filter(0, 2, 3), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendNull().appendDouble(2).appendDouble(3).build().filter(0, 2, 3) + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(3).build().filter(0, 1, 2), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(4).appendDouble(2).appendDouble(3).build().filter(0, 2, 3), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendNull().appendDouble(2).appendDouble(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); // all these constant-like blocks should be equivalent List<DoubleBlock> moreBlocks = List.of( - new DoubleArrayVector(new double[] { 9, 9 }, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 9, 9 }, 2).asBlock(), new DoubleArrayBlock( new double[] { 9, 9 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new DoubleArrayBlock( new double[] { 9, 9, 4 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new DoubleArrayVector(new double[] { 9, 9 }, 2).filter(0, 1).asBlock(), - new DoubleArrayVector(new double[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), - new DoubleArrayVector(new double[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), - new DoubleArrayVector(new double[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - DoubleBlock.newConstantBlockWith(9, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(9).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(9).build().filter(0, 1), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendDouble(4).appendDouble(9).build().filter(0, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(9).appendNull().appendDouble(9).build().filter(0, 2) + blockFactory.newDoubleArrayVector(new double[] { 9, 9 }, 2).filter(0, 1).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), + blockFactory.newConstantDoubleBlockWith(9, 2), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build().filter(0, 1), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(4).appendDouble(9).build().filter(0, 2), + blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendNull().appendDouble(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); }
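The equality half of this file pins down that a DoubleBlock's identity is its logical contents, not its storage: an array vector viewed asBlock(), a filter over a larger vector, and a builder result must all compare equal and hash alike, while the inequality hunks that follow check the converse for differing lengths, values, nulls, and multivalues. The contract in miniature, with both constructions taken verbatim from the moreBlocks list above:

    DoubleBlock viaFilter = blockFactory.newDoubleArrayVector(new double[] { 9, 9, 4 }, 3).filter(0, 1).asBlock();
    DoubleBlock viaBuilder = blockFactory.newDoubleBlockBuilder(2).appendDouble(9).appendDouble(9).build();
    assertEquals(viaFilter, viaBuilder);                       // both are logically [9, 9]
    assertEquals(viaFilter.hashCode(), viaBuilder.hashCode()); // and they must hash consistently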
@@ -177,15 +185,15 @@ public void testBlockEquality() { public void testVectorInequality() { // all these vectors should NOT be equivalent List<DoubleVector> notEqualVectors = List.of( - new DoubleArrayVector(new double[] { 1 }, 1), - new DoubleArrayVector(new double[] { 9 }, 1), - new DoubleArrayVector(new double[] { 1, 2 }, 2), - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3), - new DoubleArrayVector(new double[] { 1, 2, 4 }, 3), - DoubleBlock.newConstantBlockWith(9, 2).asVector(), - DoubleBlock.newBlockBuilder(2).appendDouble(1).appendDouble(2).build().asVector().filter(1), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build().asVector(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build().asVector() + blockFactory.newDoubleArrayVector(new double[] { 1 }, 1), + blockFactory.newDoubleArrayVector(new double[] { 9 }, 1), + blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3), + blockFactory.newConstantDoubleBlockWith(9, 2).asVector(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().asVector().filter(1), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build().asVector(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -193,27 +201,27 @@ public void testVectorInequality() { public void testBlockInequality() { // all these blocks should NOT be equivalent List<DoubleBlock> notEqualBlocks = List.of( - new DoubleArrayVector(new double[] { 1 }, 1).asBlock(), - new DoubleArrayVector(new double[] { 9 }, 1).asBlock(), - new DoubleArrayVector(new double[] { 1, 2 }, 2).asBlock(), - new DoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), - new DoubleArrayVector(new double[] { 1, 2, 4 }, 3).asBlock(), - DoubleBlock.newConstantBlockWith(9, 2), - DoubleBlock.newBlockBuilder(2).appendDouble(1).appendDouble(2).build().filter(1), - DoubleBlock.newBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendNull().build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendNull().appendDouble(3).build(), - DoubleBlock.newBlockBuilder(1).appendDouble(1).appendDouble(3).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1).beginPositionEntry().appendDouble(2).appendDouble(3).build() + blockFactory.newDoubleArrayVector(new double[] { 1 }, 1).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 9 }, 1).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2 }, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 3 }, 3).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 2, 4 }, 3).asBlock(), + blockFactory.newConstantDoubleBlockWith(9, 2), +
blockFactory.newDoubleBlockBuilder(2).appendDouble(1).appendDouble(2).build().filter(1), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).appendDouble(2).appendDouble(5).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(2).appendDouble(3).appendDouble(4).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendNull().appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(1).appendDouble(1).appendDouble(3).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1).beginPositionEntry().appendDouble(2).appendDouble(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List<DoubleBlock> blocks = List.of( - DoubleBlock.newBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build(), - DoubleBlock.newBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build() + blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build(), + blockFactory.newDoubleBlockBuilder(3).appendDouble(1.1).appendNull().appendDouble(3.1).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -223,8 +231,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -241,8 +249,8 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List<DoubleBlock> blocks = List.of( - DoubleBlock.newBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build(), - DoubleBlock.newBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build() + blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build(), + blockFactory.newDoubleBlockBuilder(1).beginPositionEntry().appendDouble(1.1).appendDouble(2.2).build() ); assert blocks.get(0).getPositionCount() == 1 && blocks.get(0).getValueCount(0) == 2; assertAllEquals(blocks); @@ -251,9 +259,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - DoubleBlock.Builder builder1 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - DoubleBlock.Builder builder2 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); - DoubleBlock.Builder builder3 = DoubleBlock.newBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder1 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder2 = blockFactory.newDoubleBlockBuilder(grow ? 0 : positions); + DoubleBlock.Builder builder3 = blockFactory.newDoubleBlockBuilder(grow ?
0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java index f43159b7ce9bd..dc78b3715d12a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/FilteredBlockTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -196,57 +197,85 @@ public void testFilterOnNoNullsBlock() { public void testFilterToStringSimple() { BitSet nulls = BitSet.valueOf(new byte[] { 0x08 }); // any non-empty bitset, that does not affect the filter, should suffice - var boolVector = new BooleanArrayVector(new boolean[] { true, false, false, true }, 4); - var boolBlock = new BooleanArrayBlock( + var boolVector = blockFactory.newBooleanArrayVector(new boolean[] { true, false, false, true }, 4); + var boolBlock = blockFactory.newBooleanArrayBlock( new boolean[] { true, false, false, true }, 4, null, nulls, randomFrom(Block.MvOrdering.values()) ); - for (Object obj : List.of(boolVector.filter(0, 2), boolVector.asBlock().filter(0, 2), boolBlock.filter(0, 2))) { + for (Releasable obj : List.of(boolVector.filter(0, 2), boolVector.asBlock().filter(0, 2), boolBlock.filter(0, 2))) { String s = obj.toString(); assertThat(s, containsString("[true, false]")); assertThat(s, containsString("positions=2")); + Releasables.close(obj); } + Releasables.close(boolVector, boolBlock); - var intVector = new IntArrayVector(new int[] { 10, 20, 30, 40 }, 4); - var intBlock = new IntArrayBlock(new int[] { 10, 20, 30, 40 }, 4, null, nulls, randomFrom(Block.MvOrdering.values())); - for (Object obj : List.of(intVector.filter(0, 2), intVector.asBlock().filter(0, 2), intBlock.filter(0, 2))) { + var intVector = blockFactory.newIntArrayVector(new int[] { 10, 20, 30, 40 }, 4); + var intBlock = blockFactory.newIntArrayBlock(new int[] { 10, 20, 30, 40 }, 4, null, nulls, randomFrom(Block.MvOrdering.values())); + for (Releasable obj : List.of(intVector.filter(0, 2), intVector.asBlock().filter(0, 2), intBlock.filter(0, 2))) { String s = obj.toString(); assertThat(s, containsString("[10, 30]")); assertThat(s, containsString("positions=2")); + Releasables.close(obj); } + Releasables.close(intVector, intBlock); - var longVector = new LongArrayVector(new long[] { 100L, 200L, 300L, 400L }, 4); - var longBlock = new LongArrayBlock(new long[] { 100L, 200L, 300L, 400L }, 4, null, nulls, randomFrom(Block.MvOrdering.values())); - for (Object obj : List.of(longVector.filter(0, 2), longVector.asBlock().filter(0, 2), longBlock.filter(0, 2))) { + var longVector = blockFactory.newLongArrayVector(new long[] { 100L, 200L, 300L, 400L }, 4); + var longBlock = blockFactory.newLongArrayBlock( + new long[] { 100L, 200L, 300L, 400L }, + 4, + null, + nulls, + randomFrom(Block.MvOrdering.values()) + ); + for (Releasable obj : List.of(longVector.filter(0, 2), longVector.asBlock().filter(0, 2), longBlock.filter(0, 2))) { String s = obj.toString(); assertThat(s, 
containsString("[100, 300]")); assertThat(s, containsString("positions=2")); + Releasables.close(obj); } - var doubleVector = new DoubleArrayVector(new double[] { 1.1, 2.2, 3.3, 4.4 }, 4); - var doubleBlock = new DoubleArrayBlock(new double[] { 1.1, 2.2, 3.3, 4.4 }, 4, null, nulls, randomFrom(Block.MvOrdering.values())); - for (Object obj : List.of(doubleVector.filter(0, 2), doubleVector.asBlock().filter(0, 2), doubleBlock.filter(0, 2))) { + Releasables.close(longVector, longBlock); + + var doubleVector = blockFactory.newDoubleArrayVector(new double[] { 1.1, 2.2, 3.3, 4.4 }, 4); + var doubleBlock = blockFactory.newDoubleArrayBlock( + new double[] { 1.1, 2.2, 3.3, 4.4 }, + 4, + null, + nulls, + randomFrom(Block.MvOrdering.values()) + ); + for (Releasable obj : List.of(doubleVector.filter(0, 2), doubleVector.asBlock().filter(0, 2), doubleBlock.filter(0, 2))) { String s = obj.toString(); assertThat(s, containsString("[1.1, 3.3]")); assertThat(s, containsString("positions=2")); + Releasables.close(obj); } + Releasables.close(doubleVector, doubleBlock); + assert new BytesRef("1a").toString().equals("[31 61]") && new BytesRef("3c").toString().equals("[33 63]"); - try (var bytesRefArray = arrayOf("1a", "2b", "3c", "4d")) { - var bytesRefVector = new BytesRefArrayVector(bytesRefArray, 4); - var bytesRefBlock = new BytesRefArrayBlock(bytesRefArray, 4, null, nulls, randomFrom(Block.MvOrdering.values())); - for (Object obj : List.of(bytesRefVector.filter(0, 2), bytesRefVector.asBlock().filter(0, 2), bytesRefBlock.filter(0, 2))) { - assertThat( - obj.toString(), - either(equalTo("BytesRefArrayVector[positions=2]")).or( - equalTo("BytesRefVectorBlock[vector=BytesRefArrayVector[positions=2]]") - ) - ); - } + var bytesRefVector = blockFactory.newBytesRefArrayVector(arrayOf("1a", "2b", "3c", "4d"), 4); + var bytesRefBlock = blockFactory.newBytesRefArrayBlock( + arrayOf("1a", "2b", "3c", "4d"), + 4, + null, + nulls, + randomFrom(Block.MvOrdering.values()) + ); + for (Releasable obj : List.of(bytesRefVector.filter(0, 2), bytesRefVector.asBlock().filter(0, 2), bytesRefBlock.filter(0, 2))) { + assertThat( + obj.toString(), + either(equalTo("BytesRefArrayVector[positions=2]")).or( + equalTo("BytesRefVectorBlock[vector=BytesRefArrayVector[positions=2]]") + ) + ); + Releasables.close(obj); } + Releasables.close(bytesRefVector, bytesRefBlock); } public void testFilterToStringMultiValue() { @@ -259,7 +288,10 @@ public void testFilterToStringMultiValue() { var filter = block.filter(0, 1); assertThat( filter.toString(), - containsString("BooleanArrayBlock[positions=2, mvOrdering=UNORDERED, values=[true, true, false, false]]") + containsString( + "BooleanArrayBlock[positions=2, mvOrdering=UNORDERED, " + + "vector=BooleanArrayVector[positions=4, values=[true, true, false, false]]]" + ) ); Releasables.close(builder, block); releaseAndAssertBreaker(filter); @@ -271,7 +303,12 @@ public void testFilterToStringMultiValue() { builder.beginPositionEntry().appendInt(90).appendInt(1000).endPositionEntry(); var block = builder.build(); var filter = block.filter(0, 1); - assertThat(filter.toString(), containsString("IntArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0, 10, 20, 50]]")); + assertThat( + filter.toString(), + containsString( + "IntArrayBlock[positions=2, mvOrdering=UNORDERED, vector=IntArrayVector[positions=4, values=[0, 10, 20, 50]]]" + ) + ); Releasables.close(builder, block); releaseAndAssertBreaker(filter); } @@ -282,7 +319,12 @@ public void testFilterToStringMultiValue() { 
builder.beginPositionEntry().appendLong(90).appendLong(1000).endPositionEntry(); var block = builder.build(); var filter = block.filter(0, 1); - assertThat(filter.toString(), containsString("LongArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0, 10, 20, 50]]")); + assertThat( + filter.toString(), + containsString( + "LongArrayBlock[positions=2, mvOrdering=UNORDERED, vector=LongArrayVector[positions=4, values=[0, 10, 20, 50]]]" + ) + ); Releasables.close(builder, block); releaseAndAssertBreaker(filter); } @@ -295,7 +337,10 @@ public void testFilterToStringMultiValue() { var filter = block.filter(0, 1); assertThat( filter.toString(), - containsString("DoubleArrayBlock[positions=2, mvOrdering=UNORDERED, values=[0.0, 10.0, 0.002, 1.0E9]]") + containsString( + "DoubleArrayBlock[positions=2, mvOrdering=UNORDERED, " + + "vector=DoubleArrayVector[positions=4, values=[0.0, 10.0, 0.002, 1.0E9]]]" + ) ); Releasables.close(builder, block); releaseAndAssertBreaker(filter); @@ -309,7 +354,10 @@ public void testFilterToStringMultiValue() { builder.beginPositionEntry().appendBytesRef(new BytesRef("pig")).appendBytesRef(new BytesRef("chicken")).endPositionEntry(); var block = builder.build(); var filter = block.filter(0, 1); - assertThat(filter.toString(), containsString("BytesRefArrayBlock[positions=2, mvOrdering=UNORDERED, values=4]")); + assertThat( + filter.toString(), + containsString("BytesRefArrayBlock[positions=2, mvOrdering=UNORDERED, vector=BytesRefArrayVector[positions=4]]") + ); assertThat(filter.getPositionCount(), equalTo(2)); Releasables.close(builder, block); releaseAndAssertBreaker(filter); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java index 40c84324f13d2..6c1be6231e82c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/IntBlockEqualityTests.java @@ -7,22 +7,24 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; import java.util.BitSet; import java.util.List; -public class IntBlockEqualityTests extends ESTestCase { +public class IntBlockEqualityTests extends ComputeTestCase { + + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent List vectors = List.of( - new IntArrayVector(new int[] {}, 0), - new IntArrayVector(new int[] { 0 }, 0), - IntBlock.newConstantBlockWith(0, 0).asVector(), - IntBlock.newConstantBlockWith(0, 0).filter().asVector(), - IntBlock.newBlockBuilder(0).build().asVector(), - IntBlock.newBlockBuilder(0).appendInt(1).build().asVector().filter() + blockFactory.newIntArrayVector(new int[] {}, 0), + blockFactory.newIntArrayVector(new int[] { 0 }, 0), + blockFactory.newConstantIntVector(0, 0), + blockFactory.newConstantIntVector(0, 0).filter(), + blockFactory.newIntBlockBuilder(0).build().asVector(), + blockFactory.newIntBlockBuilder(0).appendInt(1).build().asVector().filter() ); assertAllEquals(vectors); } @@ -30,12 +32,24 @@ public void testEmptyVector() { public void testEmptyBlock() { // all these "empty" vectors should be equivalent List blocks = List.of( - new IntArrayBlock(new int[] {}, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), 
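A note on what the rewritten expectations encode: array blocks now print through their backing vector, so a multivalued filter result shows two counts at once, its own position count and the flattened value count of its vector. A sketch mirroring the int case above (method wrapper illustrative; imports as in the surrounding tests):

```java
static void printFilteredMultivalue(BlockFactory blockFactory) {
    IntBlock.Builder builder = blockFactory.newIntBlockBuilder(6);
    builder.beginPositionEntry().appendInt(0).appendInt(10).endPositionEntry();
    builder.beginPositionEntry().appendInt(20).appendInt(50).endPositionEntry();
    builder.beginPositionEntry().appendInt(90).appendInt(1000).endPositionEntry();
    IntBlock block = builder.build();
    IntBlock filter = block.filter(0, 1);
    // Expected shape, per the assertion above: two retained positions backed by
    // four flattened values:
    // IntArrayBlock[positions=2, mvOrdering=UNORDERED,
    //     vector=IntArrayVector[positions=4, values=[0, 10, 20, 50]]]
    System.out.println(filter);
    Releasables.close(builder, block, filter);
}
```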
randomFrom(Block.MvOrdering.values())), - new IntArrayBlock(new int[] { 0 }, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values())), - IntBlock.newConstantBlockWith(0, 0), - IntBlock.newBlockBuilder(0).build(), - IntBlock.newBlockBuilder(0).appendInt(1).build().filter(), - IntBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newIntArrayBlock( + new int[] {}, + 0, + new int[] {}, + BitSet.valueOf(new byte[] { 0b00 }), + randomFrom(Block.MvOrdering.values()) + ), + blockFactory.newIntArrayBlock( + new int[] { 0 }, + 0, + new int[] {}, + BitSet.valueOf(new byte[] { 0b00 }), + randomFrom(Block.MvOrdering.values()) + ), + blockFactory.newConstantIntBlockWith(0, 0), + blockFactory.newIntBlockBuilder(0).build(), + blockFactory.newIntBlockBuilder(0).appendInt(1).build().filter(), + blockFactory.newIntBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); } @@ -43,34 +57,34 @@ public void testEmptyBlock() { public void testVectorEquality() { // all these vectors should be equivalent List vectors = List.of( - new IntArrayVector(new int[] { 1, 2, 3 }, 3), - new IntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock().asVector(), - new IntArrayVector(new int[] { 1, 2, 3, 4 }, 3), - new IntArrayVector(new int[] { 1, 2, 3 }, 3).filter(0, 1, 2), - new IntArrayVector(new int[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), - new IntArrayVector(new int[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), - new IntArrayVector(new int[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3).asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().asVector().filter(0, 2, 3) + blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock().asVector(), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3, 4 }, 3), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3).filter(0, 1, 2), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), + blockFactory.newIntArrayVector(new int[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), + blockFactory.newIntArrayVector(new int[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().asVector().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().asVector().filter(0, 2, 3) ); assertAllEquals(vectors); // all these constant-like vectors should be equivalent List moreVectors = List.of( - new IntArrayVector(new int[] { 1, 1, 1 }, 3), - new IntArrayVector(new int[] { 1, 1, 1 }, 3).asBlock().asVector(), - new IntArrayVector(new int[] { 1, 1, 1, 1 }, 3), - new IntArrayVector(new int[] { 1, 1, 1 }, 3).filter(0, 1, 2), - new IntArrayVector(new int[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), - new IntArrayVector(new int[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), - new IntArrayVector(new int[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), - IntBlock.newConstantBlockWith(1, 3).asVector(), - 
IntBlock.newBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().filter(0, 2, 3).asVector(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().asVector().filter(0, 2, 3) + blockFactory.newIntArrayVector(new int[] { 1, 1, 1 }, 3), + blockFactory.newIntArrayVector(new int[] { 1, 1, 1 }, 3).asBlock().asVector(), + blockFactory.newIntArrayVector(new int[] { 1, 1, 1, 1 }, 3), + blockFactory.newIntArrayVector(new int[] { 1, 1, 1 }, 3).filter(0, 1, 2), + blockFactory.newIntArrayVector(new int[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), + blockFactory.newIntArrayVector(new int[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), + blockFactory.newIntArrayVector(new int[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), + blockFactory.newConstantIntBlockWith(1, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(1).appendInt(1).build().asVector().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().filter(0, 2, 3).asVector(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(1).appendInt(1).build().asVector().filter(0, 2, 3) ); assertAllEquals(moreVectors); } @@ -78,58 +92,60 @@ public void testVectorEquality() { public void testBlockEquality() { // all these blocks should be equivalent List blocks = List.of( - new IntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock(), + new IntArrayVector(new int[] { 1, 2, 3 }, 3, blockFactory).asBlock(), new IntArrayBlock( new int[] { 1, 2, 3 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), new IntArrayBlock( new int[] { 1, 2, 3, 4 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), - randomFrom(Block.MvOrdering.values()) + randomFrom(Block.MvOrdering.values()), + blockFactory ), - new IntArrayVector(new int[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), - new IntArrayVector(new int[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), - new IntArrayVector(new int[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), - new IntArrayVector(new int[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build(), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().filter(0, 1, 2), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3), - IntBlock.newBlockBuilder(3).appendInt(1).appendNull().appendInt(2).appendInt(3).build().filter(0, 2, 3) + new IntArrayVector(new int[] { 1, 2, 3 }, 3, blockFactory).filter(0, 1, 2).asBlock(), + new IntArrayVector(new int[] { 1, 2, 3, 4 }, 3, blockFactory).filter(0, 1, 2).asBlock(), + new IntArrayVector(new int[] { 1, 2, 3, 4 }, 4, blockFactory).filter(0, 1, 2).asBlock(), + new IntArrayVector(new int[] { 1, 2, 4, 3 }, 4, blockFactory).filter(0, 1, 3).asBlock(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build(), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(3).build().filter(0, 1, 2), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(4).appendInt(2).appendInt(3).build().filter(0, 2, 3), + 
blockFactory.newIntBlockBuilder(3).appendInt(1).appendNull().appendInt(2).appendInt(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); // all these constant-like blocks should be equivalent List moreBlocks = List.of( - new IntArrayVector(new int[] { 9, 9 }, 2).asBlock(), - new IntArrayBlock( + blockFactory.newIntArrayVector(new int[] { 9, 9 }, 2).asBlock(), + blockFactory.newIntArrayBlock( new int[] { 9, 9 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), randomFrom(Block.MvOrdering.values()) ), - new IntArrayBlock( + blockFactory.newIntArrayBlock( new int[] { 9, 9, 4 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), randomFrom(Block.MvOrdering.values()) ), - new IntArrayVector(new int[] { 9, 9 }, 2).filter(0, 1).asBlock(), - new IntArrayVector(new int[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), - new IntArrayVector(new int[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), - new IntArrayVector(new int[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - IntBlock.newConstantBlockWith(9, 2), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(9).build(), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(9).build().filter(0, 1), - IntBlock.newBlockBuilder(2).appendInt(9).appendInt(4).appendInt(9).build().filter(0, 2), - IntBlock.newBlockBuilder(2).appendInt(9).appendNull().appendInt(9).build().filter(0, 2) + blockFactory.newIntArrayVector(new int[] { 9, 9 }, 2).filter(0, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), + blockFactory.newConstantIntBlockWith(9, 2), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(9).build(), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(9).build().filter(0, 1), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendInt(4).appendInt(9).build().filter(0, 2), + blockFactory.newIntBlockBuilder(2).appendInt(9).appendNull().appendInt(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -137,15 +153,15 @@ public void testBlockEquality() { public void testVectorInequality() { // all these vectors should NOT be equivalent List notEqualVectors = List.of( - new IntArrayVector(new int[] { 1 }, 1), - new IntArrayVector(new int[] { 9 }, 1), - new IntArrayVector(new int[] { 1, 2 }, 2), - new IntArrayVector(new int[] { 1, 2, 3 }, 3), - new IntArrayVector(new int[] { 1, 2, 4 }, 3), - IntBlock.newConstantBlockWith(9, 2).asVector(), - IntBlock.newBlockBuilder(2).appendInt(1).appendInt(2).build().asVector().filter(1), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build().asVector(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build().asVector() + blockFactory.newIntArrayVector(new int[] { 1 }, 1), + blockFactory.newIntArrayVector(new int[] { 9 }, 1), + blockFactory.newIntArrayVector(new int[] { 1, 2 }, 2), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3), + blockFactory.newIntArrayVector(new int[] { 1, 2, 4 }, 3), + blockFactory.newConstantIntBlockWith(9, 2).asVector(), + blockFactory.newIntBlockBuilder(2).appendInt(1).appendInt(2).build().asVector().filter(1), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build().asVector(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -153,27 +169,27 @@ public void 
testVectorInequality() { public void testBlockInequality() { // all these blocks should NOT be equivalent List notEqualBlocks = List.of( - new IntArrayVector(new int[] { 1 }, 1).asBlock(), - new IntArrayVector(new int[] { 9 }, 1).asBlock(), - new IntArrayVector(new int[] { 1, 2 }, 2).asBlock(), - new IntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock(), - new IntArrayVector(new int[] { 1, 2, 4 }, 3).asBlock(), - IntBlock.newConstantBlockWith(9, 2), - IntBlock.newBlockBuilder(2).appendInt(1).appendInt(2).build().filter(1), - IntBlock.newBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendInt(3).build(), - IntBlock.newBlockBuilder(3).appendInt(1).beginPositionEntry().appendInt(2).appendInt(3).build() + blockFactory.newIntArrayVector(new int[] { 1 }, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 9 }, 1).asBlock(), + blockFactory.newIntArrayVector(new int[] { 1, 2 }, 2).asBlock(), + blockFactory.newIntArrayVector(new int[] { 1, 2, 3 }, 3).asBlock(), + blockFactory.newIntArrayVector(new int[] { 1, 2, 4 }, 3).asBlock(), + blockFactory.newConstantIntBlockWith(9, 2), + blockFactory.newIntBlockBuilder(2).appendInt(1).appendInt(2).build().filter(1), + blockFactory.newIntBlockBuilder(3).appendInt(1).appendInt(2).appendInt(5).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(2).appendInt(3).appendInt(4).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendInt(3).build(), + blockFactory.newIntBlockBuilder(3).appendInt(1).beginPositionEntry().appendInt(2).appendInt(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List blocks = List.of( - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), - IntBlock.newBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build() + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build(), + blockFactory.newIntBlockBuilder(1).appendInt(1).appendNull().appendInt(3).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -184,8 +200,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = blockFactory.newIntBlockBuilder(grow ? 
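A note on the recurring `grow ? 0 : positions` pattern: the builder argument is only a capacity estimate, and the tests randomly take either the pre-sized or the grow-from-zero path to prove the result is identical. Sketched below (helper name illustrative):

```java
static void checkEstimateIsOnlyAHint(BlockFactory blockFactory) {
    int positions = 256;
    IntBlock.Builder presized = blockFactory.newIntBlockBuilder(positions);
    IntBlock.Builder growing = blockFactory.newIntBlockBuilder(0); // resizes as needed
    for (int p = 0; p < positions; p++) {
        presized.appendInt(p);
        growing.appendInt(p);
    }
    IntBlock a = presized.build();
    IntBlock b = growing.build();
    assert a.equals(b) && a.hashCode() == b.hashCode(); // estimates never leak into equality
    Releasables.close(presized, growing, a, b);
}
```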
0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -202,8 +218,8 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List blocks = List.of( - IntBlock.newBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build(), - IntBlock.newBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build() + blockFactory.newIntBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build(), + blockFactory.newIntBlockBuilder(1).beginPositionEntry().appendInt(1).appendInt(2).build() ); assertEquals(1, blocks.get(0).getPositionCount()); assertEquals(2, blocks.get(0).getValueCount(0)); @@ -213,9 +229,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - IntBlock.Builder builder1 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder2 = IntBlock.newBlockBuilder(grow ? 0 : positions); - IntBlock.Builder builder3 = IntBlock.newBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder1 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder2 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); + IntBlock.Builder builder3 = blockFactory.newIntBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java index a24b4a4dd6fa6..27a2f9702a0ae 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/LongBlockEqualityTests.java @@ -7,22 +7,24 @@ package org.elasticsearch.compute.data; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.compute.operator.ComputeTestCase; import java.util.BitSet; import java.util.List; -public class LongBlockEqualityTests extends ESTestCase { +public class LongBlockEqualityTests extends ComputeTestCase { + + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); public void testEmptyVector() { // all these "empty" vectors should be equivalent List vectors = List.of( - new LongArrayVector(new long[] {}, 0), - new LongArrayVector(new long[] { 0 }, 0), - LongBlock.newConstantBlockWith(0, 0).asVector(), - LongBlock.newConstantBlockWith(0, 0).filter().asVector(), - LongBlock.newBlockBuilder(0).build().asVector(), - LongBlock.newBlockBuilder(0).appendLong(1).build().asVector().filter() + blockFactory.newLongArrayVector(new long[] {}, 0), + blockFactory.newLongArrayVector(new long[] { 0 }, 0), + blockFactory.newConstantLongBlockWith(0, 0).asVector(), + blockFactory.newConstantLongBlockWith(0, 0).filter().asVector(), + blockFactory.newLongBlockBuilder(0).build().asVector(), + blockFactory.newLongBlockBuilder(0).appendLong(1).build().asVector().filter() ); assertAllEquals(vectors); } @@ -30,18 +32,24 @@ public void testEmptyVector() { public void testEmptyBlock() { // all these "empty" vectors should be equivalent List blocks = List.of( - new LongArrayBlock(new long[] {}, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values())), - new LongArrayBlock( + blockFactory.newLongArrayBlock( + new 
long[] {}, + 0, + new int[] {}, + BitSet.valueOf(new byte[] { 0b00 }), + randomFrom(Block.MvOrdering.values()) + ), + blockFactory.newLongArrayBlock( new long[] { 0 }, 0, new int[] {}, BitSet.valueOf(new byte[] { 0b00 }), randomFrom(Block.MvOrdering.values()) ), - LongBlock.newConstantBlockWith(0, 0), - LongBlock.newBlockBuilder(0).build(), - LongBlock.newBlockBuilder(0).appendLong(1).build().filter(), - LongBlock.newBlockBuilder(0).appendNull().build().filter() + blockFactory.newConstantLongBlockWith(0, 0), + blockFactory.newLongBlockBuilder(0).build(), + blockFactory.newLongBlockBuilder(0).appendLong(1).build().filter(), + blockFactory.newLongBlockBuilder(0).appendNull().build().filter() ); assertAllEquals(blocks); } @@ -49,34 +57,34 @@ public void testEmptyBlock() { public void testVectorEquality() { // all these vectors should be equivalent List vectors = List.of( - new LongArrayVector(new long[] { 1, 2, 3 }, 3), - new LongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock().asVector(), - new LongArrayVector(new long[] { 1, 2, 3, 4 }, 3), - new LongArrayVector(new long[] { 1, 2, 3 }, 3).filter(0, 1, 2), - new LongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), - new LongArrayVector(new long[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), - new LongArrayVector(new long[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().asVector().filter(0, 2, 3) + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock().asVector(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 3), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).filter(0, 1, 2), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2), + blockFactory.newLongArrayVector(new long[] { 0, 1, 2, 3 }, 4).filter(1, 2, 3), + blockFactory.newLongArrayVector(new long[] { 1, 4, 2, 3 }, 4).filter(0, 2, 3), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().asVector().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().asVector().filter(0, 2, 3) ); assertAllEquals(vectors); // all these constant-like vectors should be equivalent List moreVectors = List.of( - new LongArrayVector(new long[] { 1, 1, 1 }, 3), - new LongArrayVector(new long[] { 1, 1, 1 }, 3).asBlock().asVector(), - new LongArrayVector(new long[] { 1, 1, 1, 1 }, 3), - new LongArrayVector(new long[] { 1, 1, 1 }, 3).filter(0, 1, 2), - new LongArrayVector(new long[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), - new LongArrayVector(new long[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), - new LongArrayVector(new long[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), - LongBlock.newConstantBlockWith(1, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector(), - 
LongBlock.newBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().filter(0, 2, 3).asVector(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().asVector().filter(0, 2, 3) + blockFactory.newLongArrayVector(new long[] { 1, 1, 1 }, 3), + blockFactory.newLongArrayVector(new long[] { 1, 1, 1 }, 3).asBlock().asVector(), + blockFactory.newLongArrayVector(new long[] { 1, 1, 1, 1 }, 3), + blockFactory.newLongArrayVector(new long[] { 1, 1, 1 }, 3).filter(0, 1, 2), + blockFactory.newLongArrayVector(new long[] { 1, 1, 1, 4 }, 4).filter(0, 1, 2), + blockFactory.newLongArrayVector(new long[] { 3, 1, 1, 1 }, 4).filter(1, 2, 3), + blockFactory.newLongArrayVector(new long[] { 1, 4, 1, 1 }, 4).filter(0, 2, 3), + blockFactory.newConstantLongBlockWith(1, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(1).appendLong(1).build().asVector().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().filter(0, 2, 3).asVector(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(1).appendLong(1).build().asVector().filter(0, 2, 3) ); assertAllEquals(moreVectors); } @@ -84,58 +92,58 @@ public void testVectorEquality() { public void testBlockEquality() { // all these blocks should be equivalent List blocks = List.of( - new LongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock(), - new LongArrayBlock( + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock(), + blockFactory.newLongArrayBlock( new long[] { 1, 2, 3 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b000 }), randomFrom(Block.MvOrdering.values()) ), - new LongArrayBlock( + blockFactory.newLongArrayBlock( new long[] { 1, 2, 3, 4 }, 3, new int[] { 0, 1, 2, 3 }, BitSet.valueOf(new byte[] { 0b1000 }), randomFrom(Block.MvOrdering.values()) ), - new LongArrayVector(new long[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), - new LongArrayVector(new long[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), - new LongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), - new LongArrayVector(new long[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build(), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().filter(0, 1, 2), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3), - LongBlock.newBlockBuilder(3).appendLong(1).appendNull().appendLong(2).appendLong(3).build().filter(0, 2, 3) + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 3).filter(0, 1, 2).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3, 4 }, 4).filter(0, 1, 2).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 4, 3 }, 4).filter(0, 1, 3).asBlock(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build(), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build().filter(0, 1, 2), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(4).appendLong(2).appendLong(3).build().filter(0, 2, 3), + 
blockFactory.newLongBlockBuilder(3).appendLong(1).appendNull().appendLong(2).appendLong(3).build().filter(0, 2, 3) ); assertAllEquals(blocks); // all these constant-like blocks should be equivalent List moreBlocks = List.of( - new LongArrayVector(new long[] { 9, 9 }, 2).asBlock(), - new LongArrayBlock( + blockFactory.newLongArrayVector(new long[] { 9, 9 }, 2).asBlock(), + blockFactory.newLongArrayBlock( new long[] { 9, 9 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b000 }), randomFrom(Block.MvOrdering.values()) ), - new LongArrayBlock( + blockFactory.newLongArrayBlock( new long[] { 9, 9, 4 }, 2, new int[] { 0, 1, 2 }, BitSet.valueOf(new byte[] { 0b100 }), randomFrom(Block.MvOrdering.values()) ), - new LongArrayVector(new long[] { 9, 9 }, 2).filter(0, 1).asBlock(), - new LongArrayVector(new long[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), - new LongArrayVector(new long[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), - new LongArrayVector(new long[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), - LongBlock.newConstantBlockWith(9, 2), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(9).build(), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(9).build().filter(0, 1), - LongBlock.newBlockBuilder(2).appendLong(9).appendLong(4).appendLong(9).build().filter(0, 2), - LongBlock.newBlockBuilder(2).appendLong(9).appendNull().appendLong(9).build().filter(0, 2) + blockFactory.newLongArrayVector(new long[] { 9, 9 }, 2).filter(0, 1).asBlock(), + blockFactory.newLongArrayVector(new long[] { 9, 9, 4 }, 2).filter(0, 1).asBlock(), + blockFactory.newLongArrayVector(new long[] { 9, 9, 4 }, 3).filter(0, 1).asBlock(), + blockFactory.newLongArrayVector(new long[] { 9, 4, 9 }, 3).filter(0, 2).asBlock(), + blockFactory.newConstantLongBlockWith(9, 2), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(9).build(), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(9).build().filter(0, 1), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendLong(4).appendLong(9).build().filter(0, 2), + blockFactory.newLongBlockBuilder(2).appendLong(9).appendNull().appendLong(9).build().filter(0, 2) ); assertAllEquals(moreBlocks); } @@ -143,15 +151,15 @@ public void testBlockEquality() { public void testVectorInequality() { // all these vectors should NOT be equivalent List notEqualVectors = List.of( - new LongArrayVector(new long[] { 1 }, 1), - new LongArrayVector(new long[] { 9 }, 1), - new LongArrayVector(new long[] { 1, 2 }, 2), - new LongArrayVector(new long[] { 1, 2, 3 }, 3), - new LongArrayVector(new long[] { 1, 2, 4 }, 3), - LongBlock.newConstantBlockWith(9, 2).asVector(), - LongBlock.newBlockBuilder(2).appendLong(1).appendLong(2).build().asVector().filter(1), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build().asVector(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build().asVector() + blockFactory.newLongArrayVector(new long[] { 1 }, 1), + blockFactory.newLongArrayVector(new long[] { 9 }, 1), + blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3), + blockFactory.newLongArrayVector(new long[] { 1, 2, 4 }, 3), + blockFactory.newConstantLongBlockWith(9, 2).asVector(), + blockFactory.newLongBlockBuilder(2).appendLong(1).appendLong(2).build().asVector().filter(1), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build().asVector(), + 
blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build().asVector() ); assertAllNotEquals(notEqualVectors); } @@ -159,27 +167,27 @@ public void testVectorInequality() { public void testBlockInequality() { // all these blocks should NOT be equivalent List notEqualBlocks = List.of( - new LongArrayVector(new long[] { 1 }, 1).asBlock(), - new LongArrayVector(new long[] { 9 }, 1).asBlock(), - new LongArrayVector(new long[] { 1, 2 }, 2).asBlock(), - new LongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock(), - new LongArrayVector(new long[] { 1, 2, 4 }, 3).asBlock(), - LongBlock.newConstantBlockWith(9, 2), - LongBlock.newBlockBuilder(2).appendLong(1).appendLong(2).build().filter(1), - LongBlock.newBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendLong(3).build(), - LongBlock.newBlockBuilder(3).appendLong(1).beginPositionEntry().appendLong(2).appendLong(3).build() + blockFactory.newLongArrayVector(new long[] { 1 }, 1).asBlock(), + blockFactory.newLongArrayVector(new long[] { 9 }, 1).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 3 }, 3).asBlock(), + blockFactory.newLongArrayVector(new long[] { 1, 2, 4 }, 3).asBlock(), + blockFactory.newConstantLongBlockWith(9, 2), + blockFactory.newLongBlockBuilder(2).appendLong(1).appendLong(2).build().filter(1), + blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(5).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(2).appendLong(3).appendLong(4).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendLong(3).build(), + blockFactory.newLongBlockBuilder(3).appendLong(1).beginPositionEntry().appendLong(2).appendLong(3).build() ); assertAllNotEquals(notEqualBlocks); } public void testSimpleBlockWithSingleNull() { List blocks = List.of( - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), - LongBlock.newBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build() + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build(), + blockFactory.newLongBlockBuilder(1).appendLong(1).appendNull().appendLong(3).build() ); assertEquals(3, blocks.get(0).getPositionCount()); assertTrue(blocks.get(0).isNull(1)); @@ -190,8 +198,8 @@ public void testSimpleBlockWithSingleNull() { public void testSimpleBlockWithManyNulls() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = blockFactory.newLongBlockBuilder(grow ? 
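A note on the last entry of each inequality list above: equality is per position, so three values grouped as [1] and [2, 3] never equal the flat block [1], [2], [3]. A sketch of that distinction (wrapper illustrative):

```java
static void checkMultivalueShapeMatters(BlockFactory blockFactory) {
    // Three values in two positions: [1] and the multivalue [2, 3].
    LongBlock grouped = blockFactory.newLongBlockBuilder(3)
        .appendLong(1)
        .beginPositionEntry().appendLong(2).appendLong(3).endPositionEntry()
        .build();
    // The same three values, one per position.
    LongBlock flat = blockFactory.newLongBlockBuilder(3).appendLong(1).appendLong(2).appendLong(3).build();
    assert grouped.getPositionCount() == 2 && grouped.getValueCount(1) == 2;
    assert flat.getPositionCount() == 3;
    assert grouped.equals(flat) == false; // same values, different shape
    Releasables.close(grouped, flat);
}
```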
0 : positions); for (int p = 0; p < positions; p++) { builder1.appendNull(); builder2.appendNull(); @@ -208,8 +216,8 @@ public void testSimpleBlockWithManyNulls() { public void testSimpleBlockWithSingleMultiValue() { List blocks = List.of( - LongBlock.newBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build(), - LongBlock.newBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build() + blockFactory.newLongBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build(), + blockFactory.newLongBlockBuilder(1).beginPositionEntry().appendLong(1).appendLong(2).build() ); assertEquals(1, blocks.get(0).getPositionCount()); assertEquals(2, blocks.get(0).getValueCount(0)); @@ -219,9 +227,9 @@ public void testSimpleBlockWithSingleMultiValue() { public void testSimpleBlockWithManyMultiValues() { int positions = randomIntBetween(1, 256); boolean grow = randomBoolean(); - LongBlock.Builder builder1 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder2 = LongBlock.newBlockBuilder(grow ? 0 : positions); - LongBlock.Builder builder3 = LongBlock.newBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder1 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder2 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); + LongBlock.Builder builder3 = blockFactory.newLongBlockBuilder(grow ? 0 : positions); for (int pos = 0; pos < positions; pos++) { builder1.beginPositionEntry(); builder2.beginPositionEntry(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java index 35623b93357df..28ac8e18c85d8 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MockBlockFactory.java @@ -10,6 +10,7 @@ import org.apache.lucene.tests.util.LuceneTestCase; import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.BytesRefArray; import org.elasticsearch.compute.data.Block.MvOrdering; @@ -31,7 +32,9 @@ public class MockBlockFactory extends BlockFactory { static final boolean TRACK_ALLOCATIONS = true; static Object trackDetail() { - return TRACK_ALLOCATIONS ? new RuntimeException("Block allocated from test: " + LuceneTestCase.getTestClass().getName()) : true; + return TRACK_ALLOCATIONS + ? new RuntimeException("Releasable allocated from test: " + LuceneTestCase.getTestClass().getName()) + : true; } final ConcurrentMap TRACKED_BLOCKS = new ConcurrentHashMap<>(); @@ -49,7 +52,7 @@ public void ensureAllBlocksAreReleased() { Iterator causes = copy.values().iterator(); Object firstCause = causes.next(); RuntimeException exception = new RuntimeException( - copy.size() + " blocks have not been released", + copy.size() + " releasables have not been released", firstCause instanceof Throwable ? 
(Throwable) firstCause : null ); while (causes.hasNext()) { @@ -63,11 +66,15 @@ public void ensureAllBlocksAreReleased() { } public MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays) { - this(breaker, bigArrays, null); + this(breaker, bigArrays, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE); } - protected MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays, BlockFactory parent) { - super(breaker, bigArrays, parent); + public MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize) { + this(breaker, bigArrays, maxPrimitiveArraySize, null); + } + + public MockBlockFactory(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize, BlockFactory parent) { + super(breaker, bigArrays, maxPrimitiveArraySize, parent); } @Override @@ -75,7 +82,7 @@ public BlockFactory newChildFactory(LocalCircuitBreaker childBreaker) { if (childBreaker.parentBreaker() != breaker()) { throw new IllegalStateException("Different parent breaker"); } - return new MockBlockFactory(childBreaker, bigArrays(), this); + return new MockBlockFactory(childBreaker, bigArrays(), ByteSizeValue.ofBytes(maxPrimitiveArrayBytes()), this); } @Override @@ -116,7 +123,7 @@ void purgeTrackBlocks() { TRACKED_BLOCKS.remove(vecBuilder); } } else if (b instanceof Vector vector) { - if (vector.asBlock().isReleased()) { + if (vector.isReleased()) { TRACKED_BLOCKS.remove(vector); } } else { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java index f067999a04ff1..d3572377912ac 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/MultiValueBlockTests.java @@ -23,7 +23,7 @@ public class MultiValueBlockTests extends SerializationTestCase { public void testIntBlockTrivial1() { - var blockBuilder = IntBlock.newBlockBuilder(4); + var blockBuilder = blockFactory.newIntBlockBuilder(4); blockBuilder.appendInt(10); blockBuilder.beginPositionEntry(); blockBuilder.appendInt(21); @@ -54,10 +54,11 @@ public void testIntBlockTrivial1() { // cannot get a Vector view assertNull(block.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); + block.close(); } public void testIntBlockTrivial() { - var blockBuilder = IntBlock.newBlockBuilder(10); + var blockBuilder = blockFactory.newIntBlockBuilder(10); blockBuilder.appendInt(1); blockBuilder.beginPositionEntry(); blockBuilder.appendInt(21); @@ -79,57 +80,66 @@ public void testIntBlockTrivial() { assertThat(block.getInt(block.getFirstValueIndex(0)), is(1)); assertNull(block.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close); + block.close(); } public void testEmpty() { for (int initialSize : new int[] { 0, 10, 100, randomInt(512) }) { - IntBlock intBlock = IntBlock.newBlockBuilder(initialSize).build(); + IntBlock intBlock = blockFactory.newIntBlockBuilder(initialSize).build(); assertThat(intBlock.getPositionCount(), is(0)); assertThat(intBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); + intBlock.close(); - LongBlock longBlock = LongBlock.newBlockBuilder(initialSize).build(); + LongBlock 
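A note on the MockBlockFactory changes above: the now-public constructors and the broadened "releasables" wording serve one workflow, in which every allocation records a creation trace and `ensureAllBlocksAreReleased()` rethrows the first one left open. A sketch of that harness, using only constructors and constants visible in this diff:

```java
import org.elasticsearch.common.breaker.NoopCircuitBreaker;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.MockBlockFactory;

class LeakCheckSketch {
    static void demo() {
        MockBlockFactory factory = new MockBlockFactory(
            new NoopCircuitBreaker("test-noop"),
            BigArrays.NON_RECYCLING_INSTANCE,
            BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE
        );
        IntBlock leaked = factory.newIntBlockBuilder(1).appendInt(1).build();
        // Deliberately not closing `leaked`: this call should now fail with
        // "1 releasables have not been released", carrying the allocation trace.
        factory.ensureAllBlocksAreReleased();
    }
}
```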
longBlock = blockFactory.newLongBlockBuilder(initialSize).build(); assertThat(longBlock.getPositionCount(), is(0)); assertThat(longBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); + longBlock.close(); - DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).build(); + DoubleBlock doubleBlock = blockFactory.newDoubleBlockBuilder(initialSize).build(); assertThat(doubleBlock.getPositionCount(), is(0)); assertThat(doubleBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); + doubleBlock.close(); - BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).build(); + BytesRefBlock bytesRefBlock = blockFactory.newBytesRefBlockBuilder(initialSize).build(); assertThat(bytesRefBlock.getPositionCount(), is(0)); assertThat(bytesRefBlock.asVector(), is(notNullValue())); EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); + bytesRefBlock.close(); } } public void testNullOnly() throws IOException { for (int initialSize : new int[] { 0, 10, 100, randomInt(512) }) { - IntBlock intBlock = IntBlock.newBlockBuilder(initialSize).appendNull().build(); + IntBlock intBlock = blockFactory.newIntBlockBuilder(initialSize).appendNull().build(); assertThat(intBlock.getPositionCount(), is(1)); assertThat(intBlock.getValueCount(0), is(0)); assertNull(intBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(intBlock, this::serializeDeserializeBlock, null, Releasable::close); + intBlock.close(); - LongBlock longBlock = LongBlock.newBlockBuilder(initialSize).appendNull().build(); + LongBlock longBlock = blockFactory.newLongBlockBuilder(initialSize).appendNull().build(); assertThat(longBlock.getPositionCount(), is(1)); assertThat(longBlock.getValueCount(0), is(0)); assertNull(longBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(longBlock, this::serializeDeserializeBlock, null, Releasable::close); + longBlock.close(); - DoubleBlock doubleBlock = DoubleBlock.newBlockBuilder(initialSize).appendNull().build(); + DoubleBlock doubleBlock = blockFactory.newDoubleBlockBuilder(initialSize).appendNull().build(); assertThat(doubleBlock.getPositionCount(), is(1)); assertThat(doubleBlock.getValueCount(0), is(0)); assertNull(doubleBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(doubleBlock, this::serializeDeserializeBlock, null, Releasable::close); + doubleBlock.close(); - BytesRefBlock bytesRefBlock = BytesRefBlock.newBlockBuilder(initialSize).appendNull().build(); + BytesRefBlock bytesRefBlock = blockFactory.newBytesRefBlockBuilder(initialSize).appendNull().build(); assertThat(bytesRefBlock.getPositionCount(), is(1)); assertThat(bytesRefBlock.getValueCount(0), is(0)); assertNull(bytesRefBlock.asVector()); EqualsHashCodeTestUtils.checkEqualsAndHashCode(bytesRefBlock, this::serializeDeserializeBlock, null, Releasable::close); + bytesRefBlock.close(); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java index b0666e89cf79e..7d3e00845284a 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java +++ 
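A note on the distinction `testEmpty` and `testNullOnly` pin down, which every newly added `close()` call above now exercises: zero positions is not the same thing as one all-null position. Sketched (wrapper illustrative):

```java
static void checkEmptyVersusNullOnly(BlockFactory blockFactory) {
    // Zero positions: still vector-backed, so asVector() is non-null.
    IntBlock empty = blockFactory.newIntBlockBuilder(0).build();
    assert empty.getPositionCount() == 0 && empty.asVector() != null;
    // One position holding zero values: a null entry, so there is no vector view.
    IntBlock nullOnly = blockFactory.newIntBlockBuilder(1).appendNull().build();
    assert nullOnly.getPositionCount() == 1;
    assert nullOnly.getValueCount(0) == 0;
    assert nullOnly.asVector() == null;
    // Under a tracking factory both must be closed, even though one is "empty".
    Releasables.close(empty, nullOnly);
}
```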
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/SerializationTestCase.java @@ -28,7 +28,7 @@ public abstract class SerializationTestCase extends ESTestCase { BigArrays bigArrays; - private BlockFactory blockFactory; + protected BlockFactory blockFactory; NamedWriteableRegistry registry = new NamedWriteableRegistry(Block.getNamedWriteables()); @Before diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java index d9377a490368d..a2b074c1403a0 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockBuilder.java @@ -29,7 +29,7 @@ public abstract class TestBlockBuilder implements Block.Builder { public abstract TestBlockBuilder endPositionEntry(); public static Block blockFromValues(List> blockValues, ElementType elementType) { - TestBlockBuilder builder = builderOf(elementType); + TestBlockBuilder builder = builderOf(TestBlockFactory.getNonBreakingInstance(), elementType); for (List rowValues : blockValues) { if (rowValues.isEmpty()) { builder.appendNull(); @@ -47,7 +47,7 @@ public static Block blockFromValues(List> blockValues, ElementType // Builds a block of single values. Each value can be null or non-null. // Differs from blockFromValues, as it does not use begin/endPositionEntry public static Block blockFromSingleValues(List blockValues, ElementType elementType) { - TestBlockBuilder builder = builderOf(elementType); + TestBlockBuilder builder = builderOf(TestBlockFactory.getNonBreakingInstance(), elementType); for (Object rowValue : blockValues) { if (rowValue == null) { builder.appendNull(); @@ -58,39 +58,23 @@ public static Block blockFromSingleValues(List blockValues, ElementType return builder.build(); } - static TestBlockBuilder builderOf(ElementType type) { + static TestBlockBuilder builderOf(BlockFactory blockFactory, ElementType type) { return switch (type) { - case INT -> new TestIntBlockBuilder(0); - case LONG -> new TestLongBlockBuilder(0); - case DOUBLE -> new TestDoubleBlockBuilder(0); - case BYTES_REF -> new TestBytesRefBlockBuilder(0); - case BOOLEAN -> new TestBooleanBlockBuilder(0); + case INT -> new TestIntBlockBuilder(blockFactory, 0); + case LONG -> new TestLongBlockBuilder(blockFactory, 0); + case DOUBLE -> new TestDoubleBlockBuilder(blockFactory, 0); + case BYTES_REF -> new TestBytesRefBlockBuilder(blockFactory, 0); + case BOOLEAN -> new TestBooleanBlockBuilder(blockFactory, 0); default -> throw new AssertionError(type); }; } - static TestBlockBuilder ofInt(int estimatedSize) { - return new TestIntBlockBuilder(estimatedSize); - } - - static TestBlockBuilder ofLong(int estimatedSize) { - return new TestLongBlockBuilder(estimatedSize); - } - - static TestBlockBuilder ofDouble(int estimatedSize) { - return new TestDoubleBlockBuilder(estimatedSize); - } - - static TestBlockBuilder ofBytesRef(int estimatedSize) { - return new TestBytesRefBlockBuilder(estimatedSize); - } - private static class TestIntBlockBuilder extends TestBlockBuilder { private final IntBlock.Builder builder; - TestIntBlockBuilder(int estimatedSize) { - builder = IntBlock.newBlockBuilder(estimatedSize); + TestIntBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + builder = blockFactory.newIntBlockBuilder(estimatedSize); } @Override @@ -150,8 +134,8 @@ private static class 
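A note on why `blockFactory` becomes protected in `SerializationTestCase` above: subclasses build their fixtures on the same tracked factory and then run the round-trip-and-dispose pattern. A sketch of that pattern inside such a subclass, mirroring the `MultiValueBlockTests` hunks (the test name is illustrative):

```java
public void testRoundTripThenDispose() {
    IntBlock block = blockFactory.newIntBlockBuilder(4)
        .appendInt(10)
        .beginPositionEntry().appendInt(21).appendInt(22).endPositionEntry()
        .build();
    // Copy through wire serialization, skip mutation, and let the utility close
    // every copy it creates; the original stays ours to release.
    EqualsHashCodeTestUtils.checkEqualsAndHashCode(block, this::serializeDeserializeBlock, null, Releasable::close);
    block.close();
}
```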
TestLongBlockBuilder extends TestBlockBuilder { private final LongBlock.Builder builder; - TestLongBlockBuilder(int estimatedSize) { - builder = LongBlock.newBlockBuilder(estimatedSize); + TestLongBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + builder = blockFactory.newLongBlockBuilder(estimatedSize); } @Override @@ -211,8 +195,8 @@ private static class TestDoubleBlockBuilder extends TestBlockBuilder { private final DoubleBlock.Builder builder; - TestDoubleBlockBuilder(int estimatedSize) { - builder = DoubleBlock.newBlockBuilder(estimatedSize); + TestDoubleBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + builder = blockFactory.newDoubleBlockBuilder(estimatedSize); } @Override @@ -272,8 +256,8 @@ private static class TestBytesRefBlockBuilder extends TestBlockBuilder { private final BytesRefBlock.Builder builder; - TestBytesRefBlockBuilder(int estimatedSize) { - builder = BytesRefBlock.newBlockBuilder(estimatedSize); + TestBytesRefBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + builder = blockFactory.newBytesRefBlockBuilder(estimatedSize); } @Override @@ -333,8 +317,8 @@ private static class TestBooleanBlockBuilder extends TestBlockBuilder { private final BooleanBlock.Builder builder; - TestBooleanBlockBuilder(int estimatedSize) { - builder = BooleanBlock.newBlockBuilder(estimatedSize); + TestBooleanBlockBuilder(BlockFactory blockFactory, int estimatedSize) { + builder = blockFactory.newBooleanBlockBuilder(estimatedSize); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java new file mode 100644 index 0000000000000..5b7072ab6476d --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/TestBlockFactory.java @@ -0,0 +1,26 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.data; + +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; + +public class TestBlockFactory { + + private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( + new NoopCircuitBreaker("test-noop"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + /** + * Returns the Non-Breaking block factory. 
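A note on `builderOf` above: it is the single switch from `ElementType` to the matching builder, now threaded through an explicit `BlockFactory` so every allocation stays attributable to a test. The same dispatch as a free-standing helper (name illustrative; factory methods as shown in this diff):

```java
static Block.Builder builderFor(BlockFactory blockFactory, ElementType type, int estimatedSize) {
    return switch (type) {
        case INT -> blockFactory.newIntBlockBuilder(estimatedSize);
        case LONG -> blockFactory.newLongBlockBuilder(estimatedSize);
        case DOUBLE -> blockFactory.newDoubleBlockBuilder(estimatedSize);
        case BYTES_REF -> blockFactory.newBytesRefBlockBuilder(estimatedSize);
        case BOOLEAN -> blockFactory.newBooleanBlockBuilder(estimatedSize);
        default -> throw new AssertionError(type); // NULL, DOC, UNKNOWN have no builder
    };
}
```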
+ */ + public static BlockFactory getNonBreakingInstance() { + return NON_BREAKING; + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java index 04ccf47ea6122..096db174a2580 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorBuilderTests.java @@ -114,11 +114,11 @@ public void testCranky() { private Vector.Builder vectorBuilder(int estimatedSize, BlockFactory blockFactory) { return switch (elementType) { case NULL, DOC, UNKNOWN -> throw new UnsupportedOperationException(); - case BOOLEAN -> BooleanVector.newVectorBuilder(estimatedSize, blockFactory); - case BYTES_REF -> BytesRefVector.newVectorBuilder(estimatedSize, blockFactory); - case DOUBLE -> DoubleVector.newVectorBuilder(estimatedSize, blockFactory); - case INT -> IntVector.newVectorBuilder(estimatedSize, blockFactory); - case LONG -> LongVector.newVectorBuilder(estimatedSize, blockFactory); + case BOOLEAN -> blockFactory.newBooleanVectorBuilder(estimatedSize); + case BYTES_REF -> blockFactory.newBytesRefVectorBuilder(estimatedSize); + case DOUBLE -> blockFactory.newDoubleVectorBuilder(estimatedSize); + case INT -> blockFactory.newIntVectorBuilder(estimatedSize); + case LONG -> blockFactory.newLongVectorBuilder(estimatedSize); }; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java index 3c46fef7e5257..cdfc7611ec678 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/data/VectorFixedBuilderTests.java @@ -116,10 +116,10 @@ public void testCranky() { private Vector.Builder vectorBuilder(int size, BlockFactory blockFactory) { return switch (elementType) { case NULL, BYTES_REF, DOC, UNKNOWN -> throw new UnsupportedOperationException(); - case BOOLEAN -> BooleanVector.newVectorFixedBuilder(size, blockFactory); - case DOUBLE -> DoubleVector.newVectorFixedBuilder(size, blockFactory); - case INT -> IntVector.newVectorFixedBuilder(size, blockFactory); - case LONG -> LongVector.newVectorFixedBuilder(size, blockFactory); + case BOOLEAN -> blockFactory.newBooleanVectorFixedBuilder(size); + case DOUBLE -> blockFactory.newDoubleVectorFixedBuilder(size); + case INT -> blockFactory.newIntVectorFixedBuilder(size); + case LONG -> blockFactory.newLongVectorFixedBuilder(size); }; } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java index d6edc903607cc..9acf188a4010d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneCountOperatorTests.java @@ -11,7 +11,6 @@ import org.apache.lucene.document.LongPoint; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.NoMergePolicy; -import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import 
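A note contrasting the two vector-builder hunks above: `newIntVectorBuilder` treats its argument as a growable estimate, while `newIntVectorFixedBuilder` sizes storage exactly once (and, as the switch shows, has no BYTES_REF variant). A hedged sketch on the new non-breaking test factory, assuming the chaining `appendInt` signatures these builders expose:

```java
static void contrastVectorBuilders() {
    BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance();
    IntVector.Builder growable = blockFactory.newIntVectorBuilder(2);
    growable.appendInt(1).appendInt(2).appendInt(3); // exceeding the estimate is fine
    IntVector grown = growable.build();
    IntVector.FixedBuilder fixed = blockFactory.newIntVectorFixedBuilder(3);
    fixed.appendInt(1).appendInt(2).appendInt(3); // exactly `size` appends allowed
    IntVector sized = fixed.build();
    assert grown.equals(sized); // the building strategy is invisible afterwards
    Releasables.close(grown, sized);
}
```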
org.apache.lucene.store.Directory; @@ -27,15 +26,11 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; -import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.junit.After; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.List; import java.util.concurrent.CopyOnWriteArrayList; @@ -44,7 +39,6 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.lessThanOrEqualTo; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class LuceneCountOperatorTests extends AnyOperatorTestCase { @@ -89,10 +83,8 @@ private LuceneCountOperator.Factory simple(BigArrays bigArrays, DataPartitioning throw new RuntimeException(e); } - SearchContext ctx = mockSearchContext(reader); - SearchExecutionContext ectx = mock(SearchExecutionContext.class); - when(ctx.getSearchExecutionContext()).thenReturn(ectx); - when(ectx.getIndexReader()).thenReturn(reader); + SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader); + when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); final Query query; if (enableShortcut && randomBoolean()) { query = new MatchAllDocsQuery(); @@ -185,25 +177,4 @@ private void testCount(Supplier contexts, int size, int limit) { assertThat(totalCount, equalTo((long) size)); } } - - /** - * Creates a mock search context with the given index reader. - * The returned mock search context can be used to test with {@link LuceneOperator}. 
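LuceneCountOperatorTests above now delegates to the shared mockSearchContext helper (kept in LuceneSourceOperatorTests, further down) instead of wiring its own SearchExecutionContext mock. Stubbing through the chained call, as in when(ctx.getSearchExecutionContext().getIndexReader()), only works because the helper pre-stubs the chained mock. A hedged sketch of that Mockito pattern, trimmed to the two stubs that matter here:

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import org.apache.lucene.index.IndexReader;
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.search.internal.SearchContext;

class ChainedStubSketch {
    static SearchContext mockSearchContext(IndexReader reader) {
        SearchContext ctx = mock(SearchContext.class);
        // Pre-wire the inner mock once, as the shared helper in this PR does...
        SearchExecutionContext ectx = mock(SearchExecutionContext.class);
        when(ctx.getSearchExecutionContext()).thenReturn(ectx);
        // ...so callers can stub directly through the chain, as the tests above now do:
        // the outer call returns the already-stubbed mock, and when(...) records the
        // getIndexReader() invocation on it.
        when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader);
        return ctx;
    }
}
```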
- */ - public static SearchContext mockSearchContext(IndexReader reader) { - try { - ContextIndexSearcher searcher = new ContextIndexSearcher( - reader, - IndexSearcher.getDefaultSimilarity(), - IndexSearcher.getDefaultQueryCache(), - TrivialQueryCachingPolicy.NEVER, - true - ); - SearchContext searchContext = mock(SearchContext.class); - when(searchContext.searcher()).thenReturn(searcher); - return searchContext; - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java index fad1f793122d8..eab3e855d01ab 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorStatusTests.java @@ -12,20 +12,39 @@ import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.elasticsearch.test.ESTestCase; +import java.util.List; +import java.util.Set; +import java.util.TreeSet; + import static org.hamcrest.Matchers.equalTo; public class LuceneSourceOperatorStatusTests extends AbstractWireSerializingTestCase { public static LuceneSourceOperator.Status simple() { - return new LuceneSourceOperator.Status(0, 0, 1, 5, 123, 99990, 8000); + return new LuceneSourceOperator.Status(2, Set.of("*:*"), new TreeSet<>(List.of("a:0", "a:1")), 0, 1, 5, 123, 99990, 8000); } public static String simpleToJson() { return """ - {"processed_slices":0,"slice_index":0,"total_slices":1,"pages_emitted":5,"slice_min":123,"slice_max":99990,"current":8000}"""; + { + "processed_slices" : 2, + "processed_queries" : [ + "*:*" + ], + "processed_shards" : [ + "a:0", + "a:1" + ], + "slice_index" : 0, + "total_slices" : 1, + "pages_emitted" : 5, + "slice_min" : 123, + "slice_max" : 99990, + "current" : 8000 + }"""; } public void testToXContent() { - assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } @Override @@ -37,6 +56,8 @@ protected Writeable.Reader instanceReader() { public LuceneSourceOperator.Status createTestInstance() { return new LuceneSourceOperator.Status( randomNonNegativeInt(), + randomProcessedQueries(), + randomProcessedShards(), randomNonNegativeInt(), randomNonNegativeInt(), randomNonNegativeInt(), @@ -46,26 +67,58 @@ public LuceneSourceOperator.Status createTestInstance() { ); } + private static Set randomProcessedQueries() { + int size = between(0, 10); + Set set = new TreeSet<>(); + while (set.size() < size) { + set.add(randomAlphaOfLength(5)); + } + return set; + } + + private static Set randomProcessedShards() { + int size = between(0, 10); + Set set = new TreeSet<>(); + while (set.size() < size) { + set.add(randomAlphaOfLength(3) + ":" + between(0, 10)); + } + return set; + } + @Override protected LuceneSourceOperator.Status mutateInstance(LuceneSourceOperator.Status instance) { int processedSlices = instance.processedSlices(); + Set processedQueries = instance.processedQueries(); + Set processedShards = instance.processedShards(); int sliceIndex = instance.sliceIndex(); int totalSlices = instance.totalSlices(); int pagesEmitted = instance.pagesEmitted(); int sliceMin = instance.sliceMin(); int sliceMax = instance.sliceMax(); int current = instance.current(); - switch (between(0, 6)) { + switch 
(between(0, 8)) { case 0 -> processedSlices = randomValueOtherThan(processedSlices, ESTestCase::randomNonNegativeInt); - case 1 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt); - case 2 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt); - case 3 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); - case 4 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt); - case 5 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt); - case 6 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt); + case 1 -> processedQueries = randomValueOtherThan(processedQueries, LuceneSourceOperatorStatusTests::randomProcessedQueries); + case 2 -> processedShards = randomValueOtherThan(processedShards, LuceneSourceOperatorStatusTests::randomProcessedShards); + case 3 -> sliceIndex = randomValueOtherThan(sliceIndex, ESTestCase::randomNonNegativeInt); + case 4 -> totalSlices = randomValueOtherThan(totalSlices, ESTestCase::randomNonNegativeInt); + case 5 -> pagesEmitted = randomValueOtherThan(pagesEmitted, ESTestCase::randomNonNegativeInt); + case 6 -> sliceMin = randomValueOtherThan(sliceMin, ESTestCase::randomNonNegativeInt); + case 7 -> sliceMax = randomValueOtherThan(sliceMax, ESTestCase::randomNonNegativeInt); + case 8 -> current = randomValueOtherThan(current, ESTestCase::randomNonNegativeInt); default -> throw new UnsupportedOperationException(); } ; - return new LuceneSourceOperator.Status(processedSlices, sliceIndex, totalSlices, pagesEmitted, sliceMin, sliceMax, current); + return new LuceneSourceOperator.Status( + processedSlices, + processedQueries, + processedShards, + sliceIndex, + totalSlices, + pagesEmitted, + sliceMin, + sliceMax, + current + ); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java index 41fe1a93d9c8b..74e9d7b122718 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneSourceOperatorTests.java @@ -27,6 +27,7 @@ import org.elasticsearch.compute.operator.OperatorTestCase; import org.elasticsearch.compute.operator.TestResultPageSinkOperator; import org.elasticsearch.core.IOUtils; +import org.elasticsearch.index.Index; import org.elasticsearch.index.cache.query.TrivialQueryCachingPolicy; import org.elasticsearch.index.fielddata.FieldDataContext; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -96,23 +97,21 @@ private LuceneSourceOperator.Factory simple(BigArrays bigArrays, DataPartitionin } SearchContext ctx = mockSearchContext(reader); - SearchExecutionContext ectx = mock(SearchExecutionContext.class); - when(ctx.getSearchExecutionContext()).thenReturn(ectx); - when(ectx.getFieldType(anyString())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { String name = inv.getArgument(0); return switch (name) { case "s" -> S_FIELD; default -> throw new IllegalArgumentException("don't support [" + name + "]"); }; }); - when(ectx.getForField(any(), any())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { MappedFieldType ft = inv.getArgument(0); IndexFieldData.Builder builder =
ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); return builder.build(new IndexFieldDataCache.None(), bigArrays.breakerService()); }); - when(ectx.nestedScope()).thenReturn(new NestedScope()); - when(ectx.nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ectx.getIndexReader()).thenReturn(reader); + when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); + when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); + when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); Function queryFunction = c -> new MatchAllDocsQuery(); int maxPageSize = between(10, Math.max(10, numDocs)); return new LuceneSourceOperator.Factory(List.of(ctx), queryFunction, dataPartitioning, 1, maxPageSize, limit); @@ -216,6 +215,10 @@ public static SearchContext mockSearchContext(IndexReader reader) { ); SearchContext searchContext = mock(SearchContext.class); when(searchContext.searcher()).thenReturn(searcher); + SearchExecutionContext searchExecutionContext = mock(SearchExecutionContext.class); + when(searchContext.getSearchExecutionContext()).thenReturn(searchExecutionContext); + when(searchExecutionContext.getFullyQualifiedIndex()).thenReturn(new Index("test", "uid")); + when(searchExecutionContext.getShardId()).thenReturn(0); return searchContext; } catch (IOException e) { throw new UncheckedIOException(e); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java index d1b9e706750df..445e3e0f80264 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/LuceneTopNSourceOperatorTests.java @@ -32,7 +32,6 @@ import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NestedLookup; import org.elasticsearch.index.mapper.NumberFieldMapper; -import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.index.query.support.NestedScope; import org.elasticsearch.indices.CrankyCircuitBreakerService; import org.elasticsearch.search.internal.SearchContext; @@ -49,7 +48,6 @@ import static org.hamcrest.Matchers.hasSize; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; -import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class LuceneTopNSourceOperatorTests extends AnyOperatorTestCase { @@ -90,23 +88,21 @@ private LuceneTopNSourceOperator.Factory simple(BigArrays bigArrays, DataPartiti } SearchContext ctx = LuceneSourceOperatorTests.mockSearchContext(reader); - SearchExecutionContext ectx = mock(SearchExecutionContext.class); - when(ctx.getSearchExecutionContext()).thenReturn(ectx); - when(ectx.getFieldType(anyString())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getFieldType(anyString())).thenAnswer(inv -> { String name = inv.getArgument(0); return switch (name) { case "s" -> S_FIELD; default -> throw new IllegalArgumentException("don't support [" + name + "]"); }; }); - when(ectx.getForField(any(), any())).thenAnswer(inv -> { + when(ctx.getSearchExecutionContext().getForField(any(), any())).thenAnswer(inv -> { MappedFieldType ft = inv.getArgument(0); IndexFieldData.Builder builder = ft.fielddataBuilder(FieldDataContext.noRuntimeFields("test")); return 
builder.build(new IndexFieldDataCache.None(), bigArrays.breakerService()); }); - when(ectx.nestedScope()).thenReturn(new NestedScope()); - when(ectx.nestedLookup()).thenReturn(NestedLookup.EMPTY); - when(ectx.getIndexReader()).thenReturn(reader); + when(ctx.getSearchExecutionContext().nestedScope()).thenReturn(new NestedScope()); + when(ctx.getSearchExecutionContext().nestedLookup()).thenReturn(NestedLookup.EMPTY); + when(ctx.getSearchExecutionContext().getIndexReader()).thenReturn(reader); Function queryFunction = c -> new MatchAllDocsQuery(); int taskConcurrency = 0; int maxPageSize = between(10, Math.max(10, size)); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java index 6f0317b509e3b..1851f7ac948cc 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/lucene/ValuesSourceReaderOperatorStatusTests.java @@ -24,11 +24,16 @@ public static ValuesSourceReaderOperator.Status simple() { public static String simpleToJson() { return """ - {"readers_built":{"ReaderType":3},"pages_processed":123}"""; + { + "readers_built" : { + "ReaderType" : 3 + }, + "pages_processed" : 123 + }"""; } public void testToXContent() { - assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java index 290756e81cfae..3495300f1bc9b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/AnyOperatorTestCase.java @@ -7,22 +7,9 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.common.breaker.CircuitBreaker; -import org.elasticsearch.common.breaker.CircuitBreakingException; -import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.common.util.MockBigArrays; -import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.aggregation.GroupingAggregatorFunction; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.MockBlockFactory; -import org.elasticsearch.indices.CrankyCircuitBreakerService; -import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; -import org.elasticsearch.test.ESTestCase; -import org.junit.After; - -import java.util.ArrayList; -import java.util.List; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.matchesPattern; @@ -30,7 +17,7 @@ /** * Superclass for testing any {@link Operator}, including {@link SourceOperator}s. */ -public abstract class AnyOperatorTestCase extends ESTestCase { +public abstract class AnyOperatorTestCase extends ComputeTestCase { /** * The operator configured a "simple" or basic way, used for smoke testing * descriptions and {@link BigArrays} and scatter/gather. @@ -87,58 +74,16 @@ public final void testSimpleToString() { } } - /** - * A {@link BigArrays} that won't throw {@link CircuitBreakingException}. 
- * <p>
- * Rather than using the {@link NoneCircuitBreakerService} we use a
- * very large limit so tests can call {@link CircuitBreaker#getUsed()}.
- * </p>
- */ - protected final BigArrays nonBreakingBigArrays() { - return new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofBytes(Integer.MAX_VALUE)).withCircuitBreaking(); - } - /** * A {@link DriverContext} with a nonBreakingBigArrays. */ protected DriverContext driverContext() { // TODO make this final once all operators support memory tracking - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)).withCircuitBreaking(); - CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); - breakers.add(breaker); - BlockFactory factory = new MockBlockFactory(breaker, bigArrays); - blockFactories.add(factory); - return new DriverContext(bigArrays, factory); - } - - protected final DriverContext nonBreakingDriverContext() { // TODO drop this once the driverContext method isn't overrideable - return new DriverContext(nonBreakingBigArrays(), BlockFactory.getNonBreakingInstance()); + BlockFactory blockFactory = blockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } - private final List breakers = new ArrayList<>(); - private final List blockFactories = new ArrayList<>(); - protected final DriverContext crankyDriverContext() { - CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); - BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); - CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); - breakers.add(breaker); - BlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); - blockFactories.add(blockFactory); - return new DriverContext(bigArrays, blockFactory); - } - - @After - public void allBreakersEmpty() throws Exception { - // first check that all big arrays are released, which can affect breakers - MockBigArrays.ensureAllArraysAreReleased(); - - for (CircuitBreaker breaker : breakers) { - for (var factory : blockFactories) { - if (factory instanceof MockBlockFactory mockBlockFactory) { - mockBlockFactory.ensureAllBlocksAreReleased(); - } - } - assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); - } + BlockFactory blockFactory = crankyBlockFactory(); + return new DriverContext(blockFactory.bigArrays(), blockFactory); } } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java index 47febc09e45f5..01f51b32edb1d 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/CannedSourceOperator.java @@ -9,7 +9,9 @@ import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; @@ -47,7 +49,7 @@ public static Page mergePages(List pages) { Block.Builder[] builders = new Block.Builder[first.getBlockCount()]; try { for (int b = 0; b < builders.length; b++) { - builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions); + builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions, TestBlockFactory.getNonBreakingInstance()); 
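mergePages above builds one Block.Builder per channel, sized for every position across all pages, and copies each page in with copyFrom. A condensed sketch of that idiom, assuming the Block.Builder.copyFrom(block, from, to) signature used in this diff and eliding the release-on-failure handling the real method wraps around the loop:

```java
import java.util.List;

import org.elasticsearch.compute.data.Block;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.data.TestBlockFactory;

class MergePagesSketch {
    static Page merge(List<Page> pages) {
        int totalPositions = pages.stream().mapToInt(Page::getPositionCount).sum();
        Page first = pages.get(0);
        // One builder per channel, sized for every position across all pages.
        Block.Builder[] builders = new Block.Builder[first.getBlockCount()];
        for (int b = 0; b < builders.length; b++) {
            builders[b] = first.getBlock(b).elementType().newBlockBuilder(totalPositions, TestBlockFactory.getNonBreakingInstance());
        }
        // Append every page's blocks channel by channel, then assemble the merged page.
        for (Page p : pages) {
            for (int b = 0; b < builders.length; b++) {
                builders[b].copyFrom(p.getBlock(b), 0, p.getPositionCount());
            }
        }
        Block[] blocks = new Block[builders.length];
        for (int b = 0; b < blocks.length; b++) {
            blocks[b] = builders[b].build();
        }
        return new Page(blocks);
    }
}
```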
} for (Page p : pages) { for (int b = 0; b < builders.length; b++) { @@ -79,11 +81,12 @@ public static Page mergePages(List pages) { */ public static List deepCopyOf(List pages) { List out = new ArrayList<>(pages.size()); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); for (Page p : pages) { Block[] blocks = new Block[p.getBlockCount()]; for (int b = 0; b < blocks.length; b++) { Block orig = p.getBlock(b); - Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount()); + Block.Builder builder = orig.elementType().newBlockBuilder(p.getPositionCount(), blockFactory); builder.copyFrom(orig, 0, p.getPositionCount()); blocks[b] = builder.build(); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java new file mode 100644 index 0000000000000..ce62fb9896eba --- /dev/null +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ComputeTestCase.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.compute.operator; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.MockBlockFactory; +import org.elasticsearch.indices.CrankyCircuitBreakerService; +import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; +import org.elasticsearch.test.ESTestCase; +import org.junit.After; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +/** + * Superclass for testing with blocks and operators + */ +public abstract class ComputeTestCase extends ESTestCase { + + private final List breakers = new ArrayList<>(); + private final List blockFactories = new ArrayList<>(); + + /** + * A {@link BigArrays} that won't throw {@link CircuitBreakingException}. + *

+ * <p>
+ * Rather than using the {@link NoneCircuitBreakerService} we use a
+ * very large limit so tests can call {@link CircuitBreaker#getUsed()}.
+ * </p>
+ */ + protected final BigArrays nonBreakingBigArrays() { + return new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofBytes(Integer.MAX_VALUE)).withCircuitBreaking(); + } + + /** + * Build a {@link BlockFactory} with a huge limit. + */ + protected final BlockFactory blockFactory() { + return blockFactory(ByteSizeValue.ofGb(1)); + } + + /** + * Build a {@link BlockFactory} with a configured limit. + */ + protected final BlockFactory blockFactory(ByteSizeValue limit) { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + BlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return factory; + } + + /** + * Build a {@link BlockFactory} that randomly fails. + */ + protected final BlockFactory crankyBlockFactory() { + CrankyCircuitBreakerService cranky = new CrankyCircuitBreakerService(); + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, cranky).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + BlockFactory blockFactory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(blockFactory); + return blockFactory; + } + + @After + public final void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + for (var factory : blockFactories) { + if (factory instanceof MockBlockFactory mockBlockFactory) { + mockBlockFactory.ensureAllBlocksAreReleased(); + } + } + for (CircuitBreaker breaker : breakers) { + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } + } +} diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java index 27076c2adf2d2..a3af5aafcbee3 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverContextTests.java @@ -13,7 +13,7 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; @@ -167,7 +167,7 @@ static class AssertingDriverContext extends DriverContext { AssertingDriverContext() { super( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() ); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java index f6b4fbc817940..ec9952cdce022 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java +++ 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverProfileTests.java @@ -30,19 +30,23 @@ public void testToXContent() { new DriverStatus.OperatorStatus("ValuesSourceReader", ValuesSourceReaderOperatorStatusTests.simple()) ) ); - assertThat( - Strings.toString(status), - equalTo( - """ - {"operators":[""" - + """ - {"operator":"LuceneSource","status":""" - + LuceneSourceOperatorStatusTests.simpleToJson() - + "},{\"operator\":\"ValuesSourceReader\",\"status\":" - + ValuesSourceReaderOperatorStatusTests.simpleToJson() - + "}]}" - ) - ); + assertThat(Strings.toString(status, true, true), equalTo(""" + { + "operators" : [ + { + "operator" : "LuceneSource", + "status" : + """.stripTrailing() + " " + LuceneSourceOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + }, + { + "operator" : "ValuesSourceReader", + "status" : + """.stripTrailing() + " " + ValuesSourceReaderOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ] + }""")); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java index cdae4283540c4..c10bcf8d49ca4 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/DriverStatusTests.java @@ -39,15 +39,34 @@ public void testToXContent() { ), List.of(new DriverStatus.OperatorStatus("ExchangeSink", ExchangeSinkOperatorStatusTests.simple())) ); - assertThat(Strings.toString(status), equalTo(""" - {"sessionId":"ABC:123","last_updated":"1973-11-29T09:27:23.214Z","status":"running", - """.trim() + """ - "completed_operators":[{"operator":"LuceneSource","status": - """.trim() + LuceneSourceOperatorStatusTests.simpleToJson() + """ - },{"operator":"ValuesSourceReader","status": - """.trim() + ValuesSourceReaderOperatorStatusTests.simpleToJson() + """ - }],"active_operators":[{"operator":"ExchangeSink","status": - """.trim() + ExchangeSinkOperatorStatusTests.simpleToJson() + "}]}")); + assertThat(Strings.toString(status, true, true), equalTo(""" + { + "sessionId" : "ABC:123", + "last_updated" : "1973-11-29T09:27:23.214Z", + "status" : "running", + "completed_operators" : [ + { + "operator" : "LuceneSource", + "status" : + """.trim() + " " + LuceneSourceOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + }, + { + "operator" : "ValuesSourceReader", + "status" : + """.stripTrailing() + " " + ValuesSourceReaderOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ], + "active_operators" : [ + { + "operator" : "ExchangeSink", + "status" : + """.stripTrailing() + " " + ExchangeSinkOperatorStatusTests.simpleToJson().replace("\n", "\n ") + """ + + } + ] + }""")); } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java index c755c5eafe08d..c5ff7cca76c29 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/EvalOperatorTests.java @@ -37,7 +37,7 @@ record Addition(DriverContext driverContext, int lhs, int rhs) implements EvalOp public Block eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); 
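The profile and status tests in these hunks stop comparing against hand-packed single-line JSON and instead render with Strings.toString(status, true, true), i.e. pretty-printed and human-readable. A self-contained sketch of the pattern; the stand-in status object and its pages_accepted payload are borrowed from the sink-status test later in this diff, and the expected whitespace assumes the XContent pretty printer's two-space, " : " style visible in the expected strings above:

```java
import org.elasticsearch.common.Strings;
import org.elasticsearch.xcontent.ToXContentObject;

class PrettyStatusJsonSketch {
    // Stand-in status; real operator statuses implement ToXContentObject the same way.
    static final ToXContentObject STATUS = (builder, params) -> builder.startObject().field("pages_accepted", 10).endObject();

    public static void main(String[] args) {
        // pretty = true, human = true: multi-line output matching the text blocks above.
        String json = Strings.toString(STATUS, true, true);
        System.out.println(json.equals("""
            {
              "pages_accepted" : 10
            }"""));
    }
}
```

Nesting one pretty-printed fragment inside another is what the replace("\n", "\n    ") calls in DriverProfileTests and DriverStatusTests handle: they re-indent the inner status JSON to its depth in the outer object.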
LongVector rhsVector = page.getBlock(1).asVector(); - try (LongVector.FixedBuilder result = LongVector.newVectorFixedBuilder(page.getPositionCount(), driverContext.blockFactory())) { + try (LongVector.FixedBuilder result = driverContext.blockFactory().newLongVectorFixedBuilder(page.getPositionCount())) { for (int p = 0; p < page.getPositionCount(); p++) { result.appendLong(lhsVector.getLong(p) + rhsVector.getLong(p)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java index d067435ba9aaa..ec6a2d408e990 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/FilterOperatorTests.java @@ -36,7 +36,7 @@ record SameLastDigit(DriverContext context, int lhs, int rhs) implements EvalOpe public Block eval(Page page) { LongVector lhsVector = page.getBlock(0).asVector(); LongVector rhsVector = page.getBlock(1).asVector(); - BooleanVector.FixedBuilder result = BooleanVector.newVectorFixedBuilder(page.getPositionCount(), context.blockFactory()); + BooleanVector.FixedBuilder result = context.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount()); for (int p = 0; p < page.getPositionCount(); p++) { result.appendBoolean(lhsVector.getLong(p) % 10 == rhsVector.getLong(p) % 10); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java index 9403d22f2b4c4..3986c4b337e03 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/ForkingOperatorTestCase.java @@ -14,9 +14,9 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.aggregation.AggregatorMode; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeSinkHandler; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceHandler; @@ -61,7 +61,7 @@ public final void testInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); try ( Driver d = new Driver( @@ -85,7 +85,7 @@ public final void testManyInitialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, 
TestBlockFactory.getNonBreakingInstance()); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); List results = new ArrayList<>(); try ( @@ -107,7 +107,7 @@ public final void testInitialIntermediateFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); try ( @@ -133,7 +133,7 @@ public final void testManyInitialManyPartialFinal() { BigArrays bigArrays = nonBreakingBigArrays(); DriverContext driverContext = driverContext(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext.blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List partials = oneDriverPerPage(input, () -> List.of(simpleWithMode(bigArrays, AggregatorMode.INITIAL).get(driverContext))); Collections.shuffle(partials, random()); @@ -163,7 +163,7 @@ public final void testManyInitialManyPartialFinal() { public final void testManyInitialManyPartialFinalRunner() { BigArrays bigArrays = nonBreakingBigArrays(); List input = CannedSourceOperator.collectPages(simpleInput(driverContext().blockFactory(), between(1_000, 100_000))); - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = new ArrayList<>(); List drivers = createDriversForInput(bigArrays, input, results, false /* no throwing ops */); var runner = new DriverRunner(threadPool.getThreadContext()) { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java index 50b20a2ffdcff..b82ded7cb812f 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MultivalueDedupeTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matcher; @@ -374,7 +375,7 @@ private int assertEncodedPosition(BasicBlockTests.RandomBlock b, BatchEncoder en * This produces a block with a single value per position, but it's good enough * for comparison. 
*/ - Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset)); + Block.Builder builder = elementType.newBlockBuilder(encoder.valueCount(offset), TestBlockFactory.getNonBreakingInstance()); BytesRef[] toDecode = new BytesRef[encoder.valueCount(offset)]; for (int i = 0; i < toDecode.length; i++) { BytesRefBuilder dest = new BytesRefBuilder(); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java index 3572dc620287d..ab2deccbe63a9 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/MvExpandOperatorTests.java @@ -12,9 +12,8 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import java.util.Iterator; import java.util.List; @@ -208,10 +207,11 @@ protected ByteSizeValue smallEnoughToCircuitBreak() { } public void testNoopStatus() { + BlockFactory blockFactory = blockFactory(); MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1000)); List result = drive( op, - List.of(new Page(IntVector.newVectorBuilder(2).appendInt(1).appendInt(2).build().asBlock())).iterator(), + List.of(new Page(blockFactory.newIntVectorBuilder(2).appendInt(1).appendInt(2).build().asBlock())).iterator(), driverContext() ); assertThat(result, hasSize(1)); @@ -224,7 +224,8 @@ public void testNoopStatus() { public void testExpandStatus() { MvExpandOperator op = new MvExpandOperator(0, randomIntBetween(1, 1)); - var builder = IntBlock.newBlockBuilder(2).beginPositionEntry().appendInt(1).appendInt(2).endPositionEntry(); + BlockFactory blockFactory = blockFactory(); + var builder = blockFactory.newIntBlockBuilder(2).beginPositionEntry().appendInt(1).appendInt(2).endPositionEntry(); List result = drive(op, List.of(new Page(builder.build())).iterator(), driverContext()); assertThat(result, hasSize(1)); assertThat(valuesAtPositions(result.get(0).getBlock(0), 0, 2), equalTo(List.of(List.of(1), List.of(2)))); @@ -232,6 +233,7 @@ public void testExpandStatus() { assertThat(status.pagesIn(), equalTo(1)); assertThat(status.pagesOut(), equalTo(1)); assertThat(status.noops(), equalTo(0)); + result.forEach(Page::releaseBlocks); } public void testExpandWithBytesRefs() { @@ -253,7 +255,7 @@ protected Page createPage(int positionOffset, int length) { ); } }); - List origInput = deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); List results = drive(new MvExpandOperator(0, randomIntBetween(1, 1000)), input.iterator(), context); assertSimpleOutput(origInput, results); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java index 2f1cc2981766e..1da1269ec1793 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/OperatorTestCase.java 
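The OperatorTestCase hunk that follows keeps the clone-before-drive idiom used throughout this PR: input pages are deep-copied with the non-breaking factory before the operator consumes (and releases) them, so assertions can still read the original values afterwards. A minimal sketch under those assumptions:

```java
import java.util.List;

import org.elasticsearch.compute.data.BlockTestUtils;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.data.TestBlockFactory;

class SnapshotInputSketch {
    // Clone the input with the non-breaking factory; the copies are not tracked by a
    // breaker, so they survive the operator releasing the originals mid-drive.
    static List<Page> snapshot(List<Page> input) {
        return BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance());
    }
}
```

The snapshot then feeds the post-drive check, as in assertSimpleOutput(origInput, results) in the MvExpand test above.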
@@ -24,6 +24,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.core.Releasables; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.CrankyCircuitBreakerService; @@ -186,7 +187,7 @@ protected final void assertSimple(DriverContext context, int size) { } // Clone the input so that the operator can close it, then, later, we can read it again to build the assertion. - List origInput = BlockTestUtils.deepCopyOf(input, BlockFactory.getNonBreakingInstance()); + List origInput = BlockTestUtils.deepCopyOf(input, TestBlockFactory.getNonBreakingInstance()); BigArrays bigArrays = context.bigArrays().withCircuitBreaking(); List results = drive(simple(bigArrays).get(context), input.iterator(), context); @@ -270,10 +271,10 @@ public static void runDriver(List drivers) { drivers.add( new Driver( "dummy-session", - new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, BlockFactory.getNonBreakingInstance()), + new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, TestBlockFactory.getNonBreakingInstance()), () -> "dummy-driver", new SequenceLongBlockSourceOperator( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), LongStream.range(0, between(1, 100)), between(1, 100) ), diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java index c8250eba5703a..cd8a49939fbb5 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/RowOperatorTests.java @@ -11,12 +11,12 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; @@ -28,7 +28,7 @@ public class RowOperatorTests extends ESTestCase { final DriverContext driverContext = new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() ); public void testBoolean() { diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java index 7c1c62aea6ab9..b92c6d01e5077 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/SequenceBooleanBlockSourceOperator.java @@ -37,12 +37,13 @@ public SequenceBooleanBlockSourceOperator(BlockFactory blockFactory, List @Override protected Page createPage(int 
positionOffset, int length) { - DoubleVector.FixedBuilder builder = DoubleVector.newVectorFixedBuilder(length, blockFactory); + DoubleVector.FixedBuilder builder = blockFactory.newDoubleVectorFixedBuilder(length); for (int i = 0; i < length; i++) { builder.appendDouble(values[positionOffset + i]); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java index 70ef2118fcef0..05f1aef5e2b1e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/StringExtractOperatorTests.java @@ -103,8 +103,9 @@ public Block eval(Page page) { public void close() {} }, new FirstWord("test"), driverContext()); - Page result = null; - try (BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(1)) { + BlockFactory blockFactory = blockFactory(); + final Page result; + try (BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(1)) { builder.beginPositionEntry(); builder.appendBytesRef(new BytesRef("foo1 bar1")); builder.appendBytesRef(new BytesRef("foo2 bar2")); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java index aaa3a6ac8a3c8..e2cb0e21938e2 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/TestResultPageSinkOperator.java @@ -7,9 +7,9 @@ package org.elasticsearch.compute.operator; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockTestUtils; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.data.TestBlockFactory; import java.util.function.Consumer; @@ -21,7 +21,7 @@ public class TestResultPageSinkOperator extends PageConsumerOperator { public TestResultPageSinkOperator(Consumer pageConsumer) { super(page -> { - Page copy = BlockTestUtils.deepCopyOf(page, BlockFactory.getNonBreakingInstance()); + Page copy = BlockTestUtils.deepCopyOf(page, TestBlockFactory.getNonBreakingInstance()); page.releaseBlocks(); pageConsumer.accept(copy); }); diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java index 74e83017e03bf..5bbe77450762b 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeServiceTests.java @@ -24,8 +24,8 @@ import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.ConstantIntVector; import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.MockBlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -85,9 +85,10 @@ public void shutdownThreadPool() { } 
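The exchange tests below adopt the two conventions this PR rolls out broadly: block builders come from a tracking BlockFactory and live in try-with-resources, and every resulting Page is released before the end-of-test breaker check runs. A sketch of the builder half, using only APIs that appear in this diff:

```java
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.Page;

class TrackedPageSketch {
    static Page intsPage(BlockFactory blockFactory, int size) {
        // The builder is Releasable: try-with-resources returns its scratch memory to
        // the breaker even if an append fails; the built block's accounting transfers
        // to the Page.
        try (IntBlock.Builder builder = blockFactory.newIntBlockBuilder(size)) {
            for (int i = 0; i < size; i++) {
                builder.appendInt(i);
            }
            return new Page(builder.build());
        }
    }
}
```

The caller then owns the page and must call page.releaseBlocks(), as testBasic now does for its seven pages.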
public void testBasic() throws Exception { + BlockFactory blockFactory = blockFactory(); Page[] pages = new Page[7]; for (int i = 0; i < pages.length; i++) { - pages[i] = new Page(new ConstantIntVector(i, 2).asBlock()); + pages[i] = new Page(blockFactory.newConstantIntBlockWith(i, 2)); } ExchangeSinkHandler sinkExchanger = new ExchangeSinkHandler(2, threadPool::relativeTimeInMillis); ExchangeSink sink1 = sinkExchanger.createExchangeSink(); @@ -143,6 +144,9 @@ public void testBasic() throws Exception { sourceExchanger.decRef(); assertTrue(latch.await(1, TimeUnit.SECONDS)); ESTestCase.terminate(threadPool); + for (Page page : pages) { + page.releaseBlocks(); + } } /** @@ -180,14 +184,15 @@ public Page getOutput() { return null; } int size = randomIntBetween(1, 10); - IntBlock.Builder builder = IntBlock.newBlockBuilder(size); - for (int i = 0; i < size; i++) { - int seqNo = nextSeqNo.incrementAndGet(); - if (seqNo < maxInputSeqNo) { - builder.appendInt(seqNo); + try (IntBlock.Builder builder = driverContext.blockFactory().newIntBlockBuilder(size)) { + for (int i = 0; i < size; i++) { + int seqNo = nextSeqNo.incrementAndGet(); + if (seqNo < maxInputSeqNo) { + builder.appendInt(seqNo); + } } + return new Page(builder.build()); } - return new Page(builder.build()); } @Override @@ -338,8 +343,9 @@ public void testConcurrentWithHandlers() { } public void testEarlyTerminate() { - IntBlock block1 = new ConstantIntVector(1, 2).asBlock(); - IntBlock block2 = new ConstantIntVector(1, 2).asBlock(); + BlockFactory blockFactory = blockFactory(); + IntBlock block1 = blockFactory.newConstantIntBlockWith(1, 2); + IntBlock block2 = blockFactory.newConstantIntBlockWith(1, 2); Page p1 = new Page(block1); Page p2 = new Page(block2); ExchangeSinkHandler sinkExchanger = new ExchangeSinkHandler(2, threadPool::relativeTimeInMillis); @@ -368,7 +374,7 @@ public void testConcurrentWithTransportActions() throws Exception { try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - ExchangeSourceHandler sourceHandler = exchange0.createSourceHandler(exchangeId, randomExchangeBuffer(), ESQL_TEST_EXECUTOR); + var sourceHandler = new ExchangeSourceHandler(randomExchangeBuffer(), threadPool.executor(ESQL_TEST_EXECUTOR)); ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomExchangeBuffer()); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, node1.getLocalNode()), randomIntBetween(1, 5)); final int maxInputSeqNo = rarely() ? 
-1 : randomIntBetween(0, 50_000); @@ -410,8 +416,8 @@ public void sendResponse(TransportResponse transportResponse) throws IOException } } } - ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); origResp.decRef(); + ExchangeResponse newResp = new ExchangeResponse(page, origResp.finished()); super.sendResponse(newResp); } }; @@ -421,8 +427,8 @@ public void sendResponse(TransportResponse transportResponse) throws IOException try (exchange0; exchange1; node0; node1) { String exchangeId = "exchange"; Task task = new Task(1, "", "", "", null, Collections.emptyMap()); - ExchangeSourceHandler sourceHandler = exchange0.createSourceHandler(exchangeId, randomIntBetween(1, 128), ESQL_TEST_EXECUTOR); - ExchangeSinkHandler sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); + var sourceHandler = new ExchangeSourceHandler(randomIntBetween(1, 128), threadPool.executor(ESQL_TEST_EXECUTOR)); + var sinkHandler = exchange1.createSinkHandler(exchangeId, randomIntBetween(1, 128)); sourceHandler.addRemoteSink(exchange0.newRemoteSink(task, exchangeId, node0, node1.getLocalNode()), randomIntBetween(1, 5)); Exception err = expectThrows( Exception.class, @@ -431,6 +437,7 @@ public void sendResponse(TransportResponse transportResponse) throws IOException Throwable cause = ExceptionsHelper.unwrap(err, IOException.class); assertNotNull(cause); assertThat(cause.getMessage(), equalTo("page is too large")); + sinkHandler.onFailure(new RuntimeException(cause)); } } @@ -495,11 +502,18 @@ private BlockFactory blockFactory() { MockBigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, ByteSizeValue.ofGb(1)); CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); breakers.add(breaker); - return new BlockFactory(breaker, bigArrays); + MockBlockFactory factory = new MockBlockFactory(breaker, bigArrays); + blockFactories.add(factory); + return factory; } + private final List blockFactories = new ArrayList<>(); + @After public void allMemoryReleased() { + for (MockBlockFactory blockFactory : blockFactories) { + blockFactory.ensureAllBlocksAreReleased(); + } for (CircuitBreaker breaker : breakers) { assertThat(breaker.getUsed(), equalTo(0L)); } diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java index 7438055284b14..369913c7d152c 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/exchange/ExchangeSinkOperatorStatusTests.java @@ -17,7 +17,7 @@ public class ExchangeSinkOperatorStatusTests extends AbstractWireSerializingTestCase { public void testToXContent() { - assertThat(Strings.toString(simple()), equalTo(simpleToJson())); + assertThat(Strings.toString(simple(), true, true), equalTo(simpleToJson())); } public static ExchangeSinkOperator.Status simple() { @@ -26,7 +26,9 @@ public static ExchangeSinkOperator.Status simple() { public static String simpleToJson() { return """ - {"pages_accepted":10}"""; + { + "pages_accepted" : 10 + }"""; } @Override diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java 
b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java index 7c8f68549c8a4..b2d0f288c900e 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/ExtractorTests.java @@ -19,7 +19,7 @@ import org.elasticsearch.compute.data.BlockUtils; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntBlock; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.BreakingBytesRefBuilder; import org.elasticsearch.test.ESTestCase; @@ -34,6 +34,7 @@ public class ExtractorTests extends ESTestCase { @ParametersFactory public static Iterable parameters() { + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); List cases = new ArrayList<>(); for (ElementType e : ElementType.values()) { switch (e) { @@ -79,9 +80,9 @@ public static Iterable parameters() { e, TopNEncoder.DEFAULT_UNSORTABLE, () -> new DocVector( - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), - IntBlock.newConstantBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), + blockFactory.newConstantIntBlockWith(randomInt(), 1).asVector(), randomBoolean() ? null : randomBoolean() ).asBlock() ) } @@ -109,7 +110,7 @@ static Object[] valueTestCase(String name, ElementType type, TopNEncoder encoder name, type, encoder, - () -> BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), Arrays.asList(value.get()))[0] + () -> BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), Arrays.asList(value.get()))[0] ) }; } @@ -150,7 +151,7 @@ public void testNotInKey() { assertThat(valuesBuilder.length(), greaterThan(0)); ResultBuilder result = ResultBuilder.resultBuilderFor( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), testCase.type, testCase.encoder.toUnsortable(), false, @@ -177,7 +178,7 @@ public void testInKey() { assertThat(valuesBuilder.length(), greaterThan(0)); ResultBuilder result = ResultBuilder.resultBuilderFor( - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), testCase.type, testCase.encoder.toUnsortable(), true, diff --git a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java index be3e75fcce2a2..8bde782b4a011 100644 --- a/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java +++ b/x-pack/plugin/esql/compute/src/test/java/org/elasticsearch/compute/operator/topn/TopNOperatorTests.java @@ -18,15 +18,12 @@ import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import 
org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.data.TestBlockBuilder; +import org.elasticsearch.compute.data.TestBlockFactory; import org.elasticsearch.compute.operator.CannedSourceOperator; import org.elasticsearch.compute.operator.Driver; import org.elasticsearch.compute.operator.DriverContext; @@ -305,14 +302,14 @@ private List topNLong(List inputValues, int limit, boolean ascending } public void testCompareInts() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - IntBlock.newBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), - IntBlock.newBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), - IntBlock.newBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), - IntBlock.newBlockBuilder(2).appendInt(0).appendInt(Integer.MAX_VALUE).build() } + blockFactory.newIntBlockBuilder(2).appendInt(Integer.MIN_VALUE).appendInt(randomIntBetween(-1000, -1)).build(), + blockFactory.newIntBlockBuilder(2).appendInt(randomIntBetween(-1000, -1)).appendInt(0).build(), + blockFactory.newIntBlockBuilder(2).appendInt(0).appendInt(randomIntBetween(1, 1000)).build(), + blockFactory.newIntBlockBuilder(2).appendInt(randomIntBetween(1, 1000)).appendInt(Integer.MAX_VALUE).build(), + blockFactory.newIntBlockBuilder(2).appendInt(0).appendInt(Integer.MAX_VALUE).build() ), INT, DEFAULT_SORTABLE @@ -320,14 +317,14 @@ public void testCompareInts() { } public void testCompareLongs() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - LongBlock.newBlockBuilder(2).appendLong(Long.MIN_VALUE).appendLong(randomLongBetween(-1000, -1)).build(), - LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(-1000, -1)).appendLong(0).build(), - LongBlock.newBlockBuilder(2).appendLong(0).appendLong(randomLongBetween(1, 1000)).build(), - LongBlock.newBlockBuilder(2).appendLong(randomLongBetween(1, 1000)).appendLong(Long.MAX_VALUE).build(), - LongBlock.newBlockBuilder(2).appendLong(0).appendLong(Long.MAX_VALUE).build() } + blockFactory.newLongBlockBuilder(2).appendLong(Long.MIN_VALUE).appendLong(randomLongBetween(-1000, -1)).build(), + blockFactory.newLongBlockBuilder(2).appendLong(randomLongBetween(-1000, -1)).appendLong(0).build(), + blockFactory.newLongBlockBuilder(2).appendLong(0).appendLong(randomLongBetween(1, 1000)).build(), + blockFactory.newLongBlockBuilder(2).appendLong(randomLongBetween(1, 1000)).appendLong(Long.MAX_VALUE).build(), + blockFactory.newLongBlockBuilder(2).appendLong(0).appendLong(Long.MAX_VALUE).build() ), LONG, DEFAULT_SORTABLE @@ -335,17 +332,17 @@ public void testCompareLongs() { } public void testCompareDoubles() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - DoubleBlock.newBlockBuilder(2) - .appendDouble(-Double.MAX_VALUE) - .appendDouble(randomDoubleBetween(-1000, -1, true)) - .build(), - DoubleBlock.newBlockBuilder(2).appendDouble(randomDoubleBetween(-1000, -1, true)).appendDouble(0.0).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(0).appendDouble(randomDoubleBetween(1, 1000, true)).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(randomLongBetween(1, 1000)).appendDouble(Double.MAX_VALUE).build(), - DoubleBlock.newBlockBuilder(2).appendDouble(0.0).appendDouble(Double.MAX_VALUE).build() } + blockFactory.newDoubleBlockBuilder(2) + .appendDouble(-Double.MAX_VALUE) + 
.appendDouble(randomDoubleBetween(-1000, -1, true)) + .build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(randomDoubleBetween(-1000, -1, true)).appendDouble(0.0).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(0).appendDouble(randomDoubleBetween(1, 1000, true)).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(randomLongBetween(1, 1000)).appendDouble(Double.MAX_VALUE).build(), + blockFactory.newDoubleBlockBuilder(2).appendDouble(0.0).appendDouble(Double.MAX_VALUE).build() ), DOUBLE, DEFAULT_SORTABLE @@ -353,10 +350,10 @@ public void testCompareDoubles() { } public void testCompareUtf8() { + BlockFactory blockFactory = blockFactory(); testCompare( new Page( - new Block[] { - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() } + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("bye")).appendBytesRef(new BytesRef("hello")).build() ), BYTES_REF, UTF8 @@ -364,15 +361,16 @@ public void testCompareUtf8() { } public void testCompareBooleans() { + BlockFactory blockFactory = blockFactory(); testCompare( - new Page(new Block[] { BooleanBlock.newBlockBuilder(2).appendBoolean(false).appendBoolean(true).build() }), + new Page(blockFactory.newBooleanBlockBuilder(2).appendBoolean(false).appendBoolean(true).build()), BOOLEAN, DEFAULT_SORTABLE ); } private void testCompare(Page page, ElementType elementType, TopNEncoder encoder) { - Block nullBlock = Block.constantNullBlock(1); + Block nullBlock = TestBlockFactory.getNonBreakingInstance().newConstantNullBlock(1); Page nullPage = new Page(new Block[] { nullBlock, nullBlock, nullBlock, nullBlock, nullBlock }); for (int b = 0; b < page.getBlockCount(); b++) { @@ -423,6 +421,7 @@ private void testCompare(Page page, ElementType elementType, TopNEncoder encoder assertThat(TopNOperator.compareRows(r2, r1), greaterThan(0)); } } + page.releaseBlocks(); } private TopNOperator.Row row( @@ -1386,7 +1385,7 @@ public void testCloseWithoutCompleting() { randomPageSize() ) ) { - op.addInput(new Page(new IntArrayVector(new int[] { 1 }, 1).asBlock())); + op.addInput(new Page(blockFactory().newIntArrayVector(new int[] { 1 }, 1).asBlock())); } } diff --git a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java index 37f2c86dbc251..118b298025526 100644 --- a/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java +++ b/x-pack/plugin/esql/qa/server/heap-attack/src/javaRestTest/java/org/elasticsearch/xpack/esql/qa/heap_attack/HeapAttackIT.java @@ -9,6 +9,7 @@ import org.apache.http.client.config.RequestConfig; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -47,6 +48,7 @@ * Tests that run ESQL queries that have, in the past, used so much memory they * crash Elasticsearch. */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103527") public class HeapAttackIT extends ESRestTestCase { /** * This used to fail, but we've since compacted top n so it actually succeeds now. 
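// Not shown in this diff: the assertCircuitBreaks helper that testFetchTooManyMvLongs
// (un-muted in the next hunk) relies on. A minimal sketch, assuming it only needs to
// verify the 429 status that a circuit_breaking_exception produces over REST:
static void assertCircuitBreaks(ThrowingRunnable run) {
    ResponseException e = expectThrows(ResponseException.class, run);
    assertEquals(429, e.getResponse().getStatusLine().getStatusCode());
}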
@@ -344,7 +346,6 @@ public void testFetchMvLongs() throws IOException { assertMap(map, matchesMap().entry("columns", columns)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/100528") public void testFetchTooManyMvLongs() throws IOException { initMvLongsIndex(500, 100, 1000); assertCircuitBreaks(() -> fetchMvLongs()); diff --git a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java index c341ad26cb7a6..5fd6b2a5618c7 100644 --- a/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java +++ b/x-pack/plugin/esql/qa/server/src/main/java/org/elasticsearch/xpack/esql/qa/rest/generative/GenerativeRestTest.java @@ -7,9 +7,12 @@ package org.elasticsearch.xpack.esql.qa.rest.generative; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.ResponseException; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.esql.CsvTestsDataLoader; import org.elasticsearch.xpack.esql.qa.rest.RestEsqlTestCase; +import org.junit.AfterClass; import org.junit.Before; import java.io.IOException; @@ -46,6 +49,18 @@ public void setup() throws IOException { } } + @AfterClass + public static void wipeTestData() throws IOException { + try { + adminClient().performRequest(new Request("DELETE", "/*")); + } catch (ResponseException e) { + // 404 here just means we had no indexes + if (e.getResponse().getStatusLine().getStatusCode() != 404) { + throw e; + } + } + } + public void test() { List indices = availableIndices(); List policies = availableEnrichPolicies(); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java index ebe27225becb1..d193501386488 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/java/org/elasticsearch/xpack/esql/CsvTestUtils.java @@ -11,7 +11,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.Version; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockUtils; @@ -24,7 +26,7 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.logging.Logger; import org.elasticsearch.test.VersionUtils; -import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.elasticsearch.xpack.esql.action.ResponseValueUtils; import org.elasticsearch.xpack.ql.util.StringUtils; import org.supercsv.io.CsvListReader; import org.supercsv.prefs.CsvPreference; @@ -139,6 +141,7 @@ public void close() { CsvColumn[] columns = null; + var blockFactory = BlockFactory.getInstance(new NoopCircuitBreaker("test-noop"), BigArrays.NON_RECYCLING_INSTANCE); try (BufferedReader reader = org.elasticsearch.xpack.ql.TestUtils.reader(source)) { String line; int lineNumber = 1; @@ -178,7 +181,7 @@ public void close() { columns[i] = new CsvColumn( name, type, - BlockUtils.wrapperFor(BlockFactory.getNonBreakingInstance(), ElementType.fromJava(type.clazz()), 8) + 
BlockUtils.wrapperFor(blockFactory, ElementType.fromJava(type.clazz()), 8) ); } } @@ -477,7 +480,7 @@ record ActualResults( Map> responseHeaders ) { Iterator> values() { - return EsqlQueryResponse.pagesToValues(dataTypes(), pages); + return ResponseValueUtils.pagesToValues(dataTypes(), pages); } } diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec index ea53ac5679aa9..177e169387642 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/conditional.csv-spec @@ -93,3 +93,37 @@ M |10 M |10 M |10 ; + +docsCaseSuccessRate +// tag::docsCaseSuccessRate[] +FROM sample_data +| EVAL successful = CASE( + STARTS_WITH(message, "Connected to"), 1, + message == "Connection error", 0 + ) +| STATS success_rate = AVG(successful) +// end::docsCaseSuccessRate[] +; + +// tag::docsCaseSuccessRate-result[] +success_rate:double +0.5 +// end::docsCaseSuccessRate-result[] +; + +docsCaseHourlyErrorRate +// tag::docsCaseHourlyErrorRate[] +FROM sample_data +| EVAL error = CASE(message LIKE "*error*", 1, 0) +| EVAL hour = DATE_TRUNC(1 hour, @timestamp) +| STATS error_rate = AVG(error) by hour +| SORT hour +// end::docsCaseHourlyErrorRate[] +; + +// tag::docsCaseHourlyErrorRate-result[] +error_rate:double | hour:date +0.0 |2023-10-23T12:00:00.000Z +0.6 |2023-10-23T13:00:00.000Z +// end::docsCaseHourlyErrorRate-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec index f6c0666c54ed8..88f582cd50662 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/date.csv-spec @@ -139,9 +139,9 @@ emp_no:integer | birth_date:date | x:date ; evalDateTruncGrouping -from employees | eval y = date_trunc(1 year, hire_date) | stats count(emp_no) by y | sort y | keep y, count(emp_no) | limit 5; +from employees | eval y = date_trunc(1 year, hire_date) | stats c = count(emp_no) by y | sort y | keep y, c | limit 5; -y:date | count(emp_no):long +y:date | c:long 1985-01-01T00:00:00.000Z | 11 1986-01-01T00:00:00.000Z | 11 1987-01-01T00:00:00.000Z | 15 @@ -726,6 +726,86 @@ birth_date:datetime 1953-04-21T00:00:00.000Z ; +docsAutoBucketMonth +//tag::docsAutoBucketMonth[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| KEEP hire_date, month +| SORT hire_date +//end::docsAutoBucketMonth[] +; + +//tag::docsAutoBucketMonth-result[] + hire_date:date | month:date +1985-02-18T00:00:00.000Z|1985-02-01T00:00:00.000Z +1985-02-24T00:00:00.000Z|1985-02-01T00:00:00.000Z +1985-05-13T00:00:00.000Z|1985-05-01T00:00:00.000Z +1985-07-09T00:00:00.000Z|1985-07-01T00:00:00.000Z +1985-09-17T00:00:00.000Z|1985-09-01T00:00:00.000Z +1985-10-14T00:00:00.000Z|1985-10-01T00:00:00.000Z +1985-10-20T00:00:00.000Z|1985-10-01T00:00:00.000Z +1985-11-19T00:00:00.000Z|1985-11-01T00:00:00.000Z +1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z +1985-11-20T00:00:00.000Z|1985-11-01T00:00:00.000Z +1985-11-21T00:00:00.000Z|1985-11-01T00:00:00.000Z +//end::docsAutoBucketMonth-result[] +; + +docsAutoBucketMonthlyHistogram +//tag::docsAutoBucketMonthlyHistogram[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < 
"1986-01-01T00:00:00Z" +| EVAL month = AUTO_BUCKET(hire_date, 20, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| STATS hires_per_month = COUNT(*) BY month +| SORT month +//end::docsAutoBucketMonthlyHistogram[] +; + +//tag::docsAutoBucketMonthlyHistogram-result[] + hires_per_month:long | month:date +2 |1985-02-01T00:00:00.000Z +1 |1985-05-01T00:00:00.000Z +1 |1985-07-01T00:00:00.000Z +1 |1985-09-01T00:00:00.000Z +2 |1985-10-01T00:00:00.000Z +4 |1985-11-01T00:00:00.000Z +//end::docsAutoBucketMonthlyHistogram-result[] +; + +docsAutoBucketWeeklyHistogram +//tag::docsAutoBucketWeeklyHistogram[] +FROM employees +| WHERE hire_date >= "1985-01-01T00:00:00Z" AND hire_date < "1986-01-01T00:00:00Z" +| EVAL week = AUTO_BUCKET(hire_date, 100, "1985-01-01T00:00:00Z", "1986-01-01T00:00:00Z") +| STATS hires_per_week = COUNT(*) BY week +| SORT week +//end::docsAutoBucketWeeklyHistogram[] +; + +//tag::docsAutoBucketWeeklyHistogram-result[] + hires_per_week:long | week:date +2 |1985-02-18T00:00:00.000Z +1 |1985-05-13T00:00:00.000Z +1 |1985-07-08T00:00:00.000Z +1 |1985-09-16T00:00:00.000Z +2 |1985-10-14T00:00:00.000Z +4 |1985-11-18T00:00:00.000Z +//end::docsAutoBucketWeeklyHistogram-result[] +; + +docsAutoBucketLast24hr +//tag::docsAutoBucketLast24hr[] +FROM sample_data +| WHERE @timestamp >= NOW() - 1 day and @timestamp < NOW() +| EVAL bucket = AUTO_BUCKET(@timestamp, 25, DATE_FORMAT(NOW() - 1 day), DATE_FORMAT(NOW())) +| STATS COUNT(*) BY bucket +//end::docsAutoBucketLast24hr[] +; + + COUNT(*):long | bucket:date +; + docsGettingStartedAutoBucket // tag::gs-auto_bucket[] FROM sample_data @@ -767,3 +847,92 @@ median_duration:double | bucket:date 3107561.0 |2023-10-23T12:00:00.000Z 1756467.0 |2023-10-23T13:00:00.000Z ; + +dateExtract +// tag::dateExtract[] +ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") +| EVAL year = DATE_EXTRACT("year", date) +// end::dateExtract[] +; + +// tag::dateExtract-result[] +date:date | year:long +2022-05-06T00:00:00.000Z | 2022 +// end::dateExtract-result[] +; + +docsDateExtractBusinessHours +// tag::docsDateExtractBusinessHours[] +FROM sample_data +| WHERE DATE_EXTRACT("hour_of_day", @timestamp) < 9 AND DATE_EXTRACT("hour_of_day", @timestamp) >= 17 +// end::docsDateExtractBusinessHours[] +; + +// tag::docsDateExtractBusinessHours-result[] +@timestamp:date | client_ip:ip |event_duration:long | message:keyword +// end::docsDateExtractBusinessHours-result[] +; + +docsDateFormat +// tag::docsDateFormat[] +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL hired = DATE_FORMAT("YYYY-MM-dd", hire_date) +// end::docsDateFormat[] +| SORT first_name +| LIMIT 3 +; + +// tag::docsDateFormat-result[] +first_name:keyword | last_name:keyword | hire_date:date | hired:keyword +Alejandro |McAlpine |1991-06-26T00:00:00.000Z|1991-06-26 +Amabile |Gomatam |1992-11-18T00:00:00.000Z|1992-11-18 +Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-06-02 +// end::docsDateFormat-result[] +; + +docsDateTrunc +// tag::docsDateTrunc[] +FROM employees +| KEEP first_name, last_name, hire_date +| EVAL year_hired = DATE_TRUNC(1 year, hire_date) +// end::docsDateTrunc[] +| SORT first_name +| LIMIT 3 +; + +// tag::docsDateTrunc-result[] +first_name:keyword | last_name:keyword | hire_date:date | year_hired:date +Alejandro |McAlpine |1991-06-26T00:00:00.000Z|1991-01-01T00:00:00.000Z +Amabile |Gomatam |1992-11-18T00:00:00.000Z|1992-01-01T00:00:00.000Z +Anneke |Preusig |1989-06-02T00:00:00.000Z|1989-01-01T00:00:00.000Z +// end::docsDateTrunc-result[] +; + +docsDateTruncHistogram +// 
tag::docsDateTruncHistogram[] +FROM employees +| EVAL year = DATE_TRUNC(1 year, hire_date) +| STATS hires = COUNT(emp_no) BY year +| SORT year +// end::docsDateTruncHistogram[] +; + +// tag::docsDateTruncHistogram-result[] +hires:long | year:date +11 |1985-01-01T00:00:00.000Z +11 |1986-01-01T00:00:00.000Z +15 |1987-01-01T00:00:00.000Z +9 |1988-01-01T00:00:00.000Z +13 |1989-01-01T00:00:00.000Z +12 |1990-01-01T00:00:00.000Z +6 |1991-01-01T00:00:00.000Z +8 |1992-01-01T00:00:00.000Z +3 |1993-01-01T00:00:00.000Z +4 |1994-01-01T00:00:00.000Z +5 |1995-01-01T00:00:00.000Z +1 |1996-01-01T00:00:00.000Z +1 |1997-01-01T00:00:00.000Z +1 |1999-01-01T00:00:00.000Z +// end::docsDateTruncHistogram-result[] +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec index 95da19e38a05d..42c5401742e6e 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/docs.csv-spec @@ -290,20 +290,6 @@ Udi |Jansch |1.93 Uri |Lenart |1.75 ; - -dateExtract -// tag::dateExtract[] -ROW date = DATE_PARSE("yyyy-MM-dd", "2022-05-06") -| EVAL year = DATE_EXTRACT("year", date) -// end::dateExtract[] -; - -// tag::dateExtract-result[] -date:date | year:long -2022-05-06T00:00:00.000Z | 2022 -// end::dateExtract-result[] -; - docsSubstring // tag::substring[] FROM employees diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec index e6486960c7e04..39d8a8bfa57e9 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/eval.csv-spec @@ -222,6 +222,24 @@ emp_no:integer | foldable:keyword | folded_mv:keyword 10002 | "foo,bar" | [foo, bar] ; +docsConcat +// tag::docsConcat[] +FROM employees +| KEEP first_name, last_name +| EVAL fullname = CONCAT(first_name, " ", last_name) +// end::docsConcat[] +| SORT first_name +| LIMIT 3 +; + +// tag::docsConcat-result[] +first_name:keyword | last_name:keyword | fullname:keyword +Alejandro |McAlpine |Alejandro McAlpine +Amabile |Gomatam |Amabile Gomatam +Anneke |Preusig |Anneke Preusig +// end::docsConcat-result[] +; + docsGettingStartedEval // tag::gs-eval[] FROM sample_data diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec index 887d931f4cd5c..baf6da2cd0bde 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ints.csv-spec @@ -410,6 +410,30 @@ hire_date:date | salary:integer | bs:double // end::auto_bucket-result[] ; +docsAutoBucketNumeric +//tag::docsAutoBucketNumeric[] +FROM employees +| EVAL bs = AUTO_BUCKET(salary, 20, 25324, 74999) +| STATS COUNT(*) by bs +| SORT bs +//end::docsAutoBucketNumeric[] +; + +//tag::docsAutoBucketNumeric-result[] + COUNT(*):long | bs:double +9 |25000.0 +9 |30000.0 +18 |35000.0 +11 |40000.0 +11 |45000.0 +10 |50000.0 +7 |55000.0 +9 |60000.0 +8 |65000.0 +8 |70000.0 +//end::docsAutoBucketNumeric-result[] +; + cos ROW a=2 | EVAL cos=COS(a); diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec index 02e9db6ededf1..0b2ce54d5fd22 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec +++ 
b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/ip.csv-spec @@ -178,14 +178,21 @@ eth2 |epsilon |[fe81::cae2:65ff:fece:feb9, fe82::cae2:65ff:fece ; cdirMatchMultipleArgs#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] -from hosts | where cidr_match(ip1, "127.0.0.2/32", "127.0.0.3/32") | keep card, host, ip0, ip1; +//tag::cdirMatchMultipleArgs[] +FROM hosts +| WHERE CIDR_MATCH(ip1, "127.0.0.2/32", "127.0.0.3/32") +| KEEP card, host, ip0, ip1 +//end::cdirMatchMultipleArgs[] +; ignoreOrder:true -warning:Line 1:20: evaluation of [cidr_match(ip1, \"127.0.0.2/32\", \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. -warning:Line 1:20: java.lang.IllegalArgumentException: single-value function encountered multi-value +warning:Line 2:9: evaluation of [CIDR_MATCH(ip1, \"127.0.0.2/32\", \"127.0.0.3/32\")] failed, treating result as null. Only first 20 failures recorded. +warning:Line 2:9: java.lang.IllegalArgumentException: single-value function encountered multi-value +//tag::cdirMatchMultipleArgs-result[] card:keyword |host:keyword |ip0:ip |ip1:ip eth1 |beta |127.0.0.1 |127.0.0.2 eth0 |gamma |fe80::cae2:65ff:fece:feb9|127.0.0.3 +//end::cdirMatchMultipleArgs-result[] ; cidrMatchFunctionArg#[skip:-8.11.99, reason:Lucene multivalue warning introduced in 8.12 only] diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec index daf153051bb89..31b9d6101d2c5 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/math.csv-spec @@ -1118,3 +1118,34 @@ emp_no:integer | min_plus_max:integer | are_equal:boolean 10004 | 7 | false 10005 | 3 | false ; + +docsAbs +//tag::docsAbs[] +ROW number = -1.0 +| EVAL abs_number = ABS(number) +//end::docsAbs[] +; + +//tag::docsAbs-result[] +number:double | abs_number:double +-1.0 |1.0 +//end::docsAbs-result[] +; + +docsAbsEmployees +//tag::docsAbsEmployees[] +FROM employees +| KEEP first_name, last_name, height +| EVAL abs_height = ABS(0.0 - height) +//end::docsAbsEmployees[] +| SORT first_name +| LIMIT 3 +; + +//tag::docsAbsEmployees-result[] +first_name:keyword | last_name:keyword | height:double | abs_height:double +Alejandro |McAlpine |1.48 |1.48 +Amabile |Gomatam |2.09 |2.09 +Anneke |Preusig |1.56 |1.56 +//end::docsAbsEmployees-result[] +; \ No newline at end of file diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec index a3bc9c6c6dcf6..3a1ae3985e129 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/mv_expand.csv-spec @@ -316,3 +316,11 @@ a:keyword | e:keyword a | a ; + +//see https://github.com/elastic/elasticsearch/issues/103331 +keepStarMvExpand#[skip:-8.12.99] +from employees | where emp_no == 10001 | keep * | mv_expand first_name; + +avg_worked_seconds:long | birth_date:date | emp_no:integer | first_name:keyword | gender:keyword | height:double | height.float:double | height.half_float:double | height.scaled_float:double | hire_date:date | is_rehired:boolean | job_positions:keyword | languages:integer | languages.byte:integer | languages.long:long | languages.short:integer | last_name:keyword | salary:integer | salary_change:double | salary_change.int:integer | salary_change.keyword:keyword | salary_change.long:long | 
still_hired:boolean +268728049 | 1953-09-02T00:00:00.000Z | 10001 | Georgi | M | 2.03 | 2.0299999713897705 | 2.029296875 | 2.0300000000000002 | 1986-06-26T00:00:00.000Z | [false, true] | [Accountant, Senior Python Developer] | 2 | 2 | 2 | 2 | Facello | 57305 | 1.19 | 1 | 1.19 | 1 | true +; diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec index 083bd1eaf8417..ea09d981b7bf3 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/show.csv-spec @@ -74,13 +74,13 @@ tanh |"double tanh(n:integer|long|double|unsigned_long)" tau |? tau() | null | null | null |? | "" | null | false to_bool |"boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false to_boolean |"boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|keyword|text|double|long|unsigned_long|integer" | |boolean | |false |false -to_cartesianpoint |? to_cartesianpoint(arg1:?) |arg1 |? | "" |? | "" | false | false +to_cartesianpoint |"cartesian_point to_cartesianpoint(v:cartesian_point|long|unsigned_long|keyword|text)" |v |"cartesian_point|long|unsigned_long|keyword|text" | |cartesian_point | |false |false to_datetime |"date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false to_dbl |"double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false to_degrees |"double to_degrees(v:double|long|unsigned_long|integer)" |v |"double|long|unsigned_long|integer" | |double | |false |false to_double |"double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |double | |false |false to_dt |"date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" |v |"date|keyword|text|double|long|unsigned_long|integer" | |date | |false |false -to_geopoint |? to_geopoint(arg1:?) |arg1 |? | "" |? | "" | false | false +to_geopoint |"geo_point to_geopoint(v:geo_point|long|unsigned_long|keyword|text)" |v |"geo_point|long|unsigned_long|keyword|text" | |geo_point | |false |false to_int |"integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false to_integer |"integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" |v |"boolean|date|keyword|text|double|long|unsigned_long|integer" | |integer | |false |false to_ip |"ip to_ip(v:ip|keyword|text)" |v |"ip|keyword|text" | |ip | |false |false @@ -97,7 +97,7 @@ trim |"keyword|text trim(str:keyword|text)" ; -showFunctionsSynopsis#[skip:-8.11.99] +showFunctionsSynopsis#[skip:-8.12.99] show functions | keep synopsis; synopsis:keyword @@ -165,13 +165,13 @@ synopsis:keyword ? tau() "boolean to_bool(v:boolean|keyword|text|double|long|unsigned_long|integer)" "boolean to_boolean(v:boolean|keyword|text|double|long|unsigned_long|integer)" -? to_cartesianpoint(arg1:?) 
+"cartesian_point to_cartesianpoint(v:cartesian_point|long|unsigned_long|keyword|text)" "date to_datetime(v:date|keyword|text|double|long|unsigned_long|integer)" "double to_dbl(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "double to_degrees(v:double|long|unsigned_long|integer)" "double to_double(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "date to_dt(v:date|keyword|text|double|long|unsigned_long|integer)" -? to_geopoint(arg1:?) +"geo_point to_geopoint(v:geo_point|long|unsigned_long|keyword|text)" "integer to_int(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "integer to_integer(v:boolean|date|keyword|text|double|long|unsigned_long|integer)" "ip to_ip(v:ip|keyword|text)" diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec index dc96d1736858c..0ad759feeeea0 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/stats.csv-spec @@ -782,3 +782,12 @@ FROM sample_data median_duration:double | client_ip:ip ; + +fieldEscaping#[skip:-8.12.99, reason:Fixed bug in 8.13 of removing the leading/trailing backquotes of an identifier] +FROM sample_data +| stats count(`event_duration`) | keep `count(``event_duration``)` +; + +count(`event_duration`):l +7 +; diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 9b5012e56a3ff..0590caf2019b4 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -9,6 +9,7 @@ import org.elasticsearch.Build; import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction; import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; @@ -17,6 +18,7 @@ import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.health.node.selection.HealthNode; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.indices.breaker.CircuitBreakerService; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; @@ -53,11 +55,25 @@ public void ensureBlocksReleased() { CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST); try { assertBusy(() -> { - logger.info("running tasks: {}", client().admin().cluster().prepareListTasks().get()); + logger.info( + "running tasks: {}", + client().admin() + .cluster() + .prepareListTasks() + .get() + .getTasks() + .stream() + .filter( + // Skip the tasks we that'd get in the way while debugging + t -> false == t.action().contains(TransportListTasksAction.TYPE.name()) + && false == t.action().contains(HealthNode.TASK_NAME) + ) + .toList() + ); assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); }); } catch (Exception e) { - assertThat("Request breaker not reset to 0 on node: " + node, reqBreaker.getUsed(), equalTo(0L)); + throw new 
RuntimeException("failed waiting for breakers to clear", e); } } } @@ -80,6 +96,11 @@ public List> getSettings() { BlockFactory.LOCAL_BREAKER_OVER_RESERVED_MAX_SIZE_SETTING, ByteSizeValue.ofBytes(randomIntBetween(0, 16 * 1024)), Setting.Property.NodeScope + ), + Setting.byteSizeSetting( + BlockFactory.MAX_BLOCK_PRIMITIVE_ARRAY_SIZE_SETTING, + ByteSizeValue.ofBytes(randomLongBetween(1, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE.getBytes())), + Setting.Property.NodeScope ) ); } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java new file mode 100644 index 0000000000000..7a5072120e5af --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractPausableIntegTestCase.java @@ -0,0 +1,165 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.util.CollectionUtils; +import org.elasticsearch.index.engine.SegmentsStats; +import org.elasticsearch.index.mapper.OnScriptError; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.LongFieldScript; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.search.lookup.SearchLookup; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.json.JsonXContent; +import org.junit.Before; + +import java.io.IOException; +import java.util.Collection; +import java.util.Map; +import java.util.Set; +import java.util.concurrent.Semaphore; +import java.util.concurrent.TimeUnit; + +/** A pausable testcase. Subclasses extend this testcase to simulate slow running queries. + * + * Uses the evaluation of a runtime field in the mappings "pause_me" of type long, along + * with a custom script language "pause", and semaphore "scriptPermits", to block execution. 
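+ * Each value the "pause_me" script emits first acquires a permit from "scriptPermits",
+ * so a test can let execution advance one page at a time (by releasing pageSize()
+ * permits) or run to completion (by releasing numberOfDocs() permits).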
+ */ +public abstract class AbstractPausableIntegTestCase extends AbstractEsqlIntegTestCase { + + private static final Logger LOGGER = LogManager.getLogger(AbstractPausableIntegTestCase.class); + + protected static final Semaphore scriptPermits = new Semaphore(0); + + protected int pageSize = -1; + + protected int numberOfDocs = -1; + + @Override + protected Collection> nodePlugins() { + return CollectionUtils.appendToCopy(super.nodePlugins(), PausableFieldPlugin.class); + } + + protected int pageSize() { + if (pageSize == -1) { + pageSize = between(10, 100); + } + return pageSize; + } + + protected int numberOfDocs() { + if (numberOfDocs == -1) { + numberOfDocs = between(4 * pageSize(), 5 * pageSize()); + } + return numberOfDocs; + } + + @Before + public void setupIndex() throws IOException { + assumeTrue("requires query pragmas", canUseQueryPragmas()); + + XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); + mapping.startObject("runtime"); + { + mapping.startObject("pause_me"); + { + mapping.field("type", "long"); + mapping.startObject("script").field("source", "").field("lang", "pause").endObject(); + } + mapping.endObject(); + } + mapping.endObject(); + client().admin() + .indices() + .prepareCreate("test") + .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) + .setMapping(mapping.endObject()) + .get(); + + BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + for (int i = 0; i < numberOfDocs(); i++) { + bulk.add(prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i)); + } + bulk.get(); + /* + * forceMerge so we can be sure that we don't bump into tiny + * segments that finish super quickly and cause us to report strange + * statuses when we expect "starting". + */ + client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get(); + /* + * Double super extra paranoid check that force merge worked. It's + * failed to reduce the index to a single segment and caused this test + * to fail in very difficult to debug ways. If it fails again, it'll + * trip here. Or maybe it won't! And we'll learn something. Maybe + * it's ghosts. 
+ */ + SegmentsStats stats = client().admin().indices().prepareStats("test").get().getPrimaries().getSegments(); + if (stats.getCount() != 1L) { + fail(Strings.toString(stats)); + } + } + + public static class PausableFieldPlugin extends Plugin implements ScriptPlugin { + + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new ScriptEngine() { + @Override + public String getType() { + return "pause"; + } + + @Override + @SuppressWarnings("unchecked") + public <FactoryType> FactoryType compile( + String name, + String code, + ScriptContext<FactoryType> context, + Map<String, String> params + ) { + return (FactoryType) new LongFieldScript.Factory() { + @Override + public LongFieldScript.LeafFactory newFactory( + String fieldName, + Map<String, Object> params, + SearchLookup searchLookup, + OnScriptError onScriptError + ) { + return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { + @Override + public void execute() { + try { + assertTrue(scriptPermits.tryAcquire(1, TimeUnit.MINUTES)); + } catch (Exception e) { + throw new AssertionError(e); + } + LOGGER.debug("--> emitting value"); + emit(1); + } + }; + } + }; + } + + @Override + public Set<ScriptContext<?>> getSupportedContexts() { + return Set.of(LongFieldScript.CONTEXT); + } + }; + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java new file mode 100644 index 0000000000000..b58a0cd66b904 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AsyncEsqlQueryActionIT.java @@ -0,0 +1,257 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.tasks.TaskInfo; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; +import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; +import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; +import org.elasticsearch.xpack.esql.plugin.QueryPragmas; +import org.hamcrest.core.IsEqual; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.List; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.TimeValue.timeValueMillis; +import static org.elasticsearch.core.TimeValue.timeValueMinutes; +import static org.elasticsearch.core.TimeValue.timeValueSeconds; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isEmpty; +import static org.elasticsearch.test.hamcrest.OptionalMatchers.isPresent; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.notNullValue; + +/** + * Individual tests for specific aspects of the async query API. + */ +public class AsyncEsqlQueryActionIT extends AbstractPausableIntegTestCase { + + @Override + protected Collection> nodePlugins() { + ArrayList> actions = new ArrayList<>(super.nodePlugins()); + actions.add(EsqlAsyncActionIT.LocalStateEsqlAsync.class); + actions.add(InternalExchangePlugin.class); + return Collections.unmodifiableList(actions); + } + + @Override + protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { + return Settings.builder() + .put(ExchangeService.INACTIVE_SINKS_INTERVAL_SETTING, TimeValue.timeValueMillis(between(500, 2000))) + .build(); + } + + public void testBasicAsyncExecution() throws Exception { + try (var initialResponse = sendAsyncQuery()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(initialResponse.isRunning(), is(true)); + String id = initialResponse.asyncExecutionId().get(); + + if (randomBoolean()) { + // let's timeout first + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueMillis(10)); + getResultsRequest.setKeepAlive(randomKeepAlive()); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var responseWithTimeout = future.get()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(responseWithTimeout.asyncExecutionId().get(), equalTo(id)); + assertThat(responseWithTimeout.isRunning(), is(true)); + } + } + + // Now we wait + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + getResultsRequest.setKeepAlive(randomKeepAlive()); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + + // release the permits to allow the query to proceed + scriptPermits.release(numberOfDocs()); + + try (var finalResponse = future.get()) { + assertThat(finalResponse, notNullValue()); + 
assertThat(finalResponse.isRunning(), is(false)); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(finalResponse).size(), equalTo(1)); + } + + // Get the stored result (again) + var again = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var finalResponse = again.get()) { + assertThat(finalResponse, notNullValue()); + assertThat(finalResponse.isRunning(), is(false)); + assertThat(finalResponse.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(finalResponse).size(), equalTo(1)); + } + + AcknowledgedResponse deleteResponse = deleteAsyncId(id); + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + // the stored response should no longer be retrievable + var e = expectThrows(ResourceNotFoundException.class, () -> deleteAsyncId(id)); + assertThat(e.getMessage(), IsEqual.equalTo(id)); + } finally { + scriptPermits.drainPermits(); + } + } + + public void testAsyncCancellation() throws Exception { + try (var initialResponse = sendAsyncQuery()) { + assertThat(initialResponse.asyncExecutionId(), isPresent()); + assertThat(initialResponse.isRunning(), is(true)); + String id = initialResponse.asyncExecutionId().get(); + + DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id); + var future = client().execute(DeleteAsyncResultAction.INSTANCE, request); + + // there should be just one task + List tasks = getEsqlQueryTasks(); + assertThat(tasks.size(), is(1)); + + // release the permits to allow the query to proceed + scriptPermits.release(numberOfDocs()); + + var deleteResponse = future.actionGet(timeValueSeconds(60)); + assertThat(deleteResponse.isAcknowledged(), equalTo(true)); + + // there should be no tasks after delete + tasks = getEsqlQueryTasks(); + assertThat(tasks.size(), is(0)); + + // the stored response should no longer be retrievable + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setKeepAlive(timeValueMinutes(10)); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + var e = expectThrows( + ResourceNotFoundException.class, + () -> client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet() + ); + assertThat(e.getMessage(), equalTo(id)); + } finally { + scriptPermits.drainPermits(); + } + } + + public void testFinishingBeforeTimeoutKeep() { + testFinishingBeforeTimeout(true); + } + + public void testFinishingBeforeTimeoutDoNotKeep() { + testFinishingBeforeTimeout(false); + } + + private void testFinishingBeforeTimeout(boolean keepOnCompletion) { + // don't block the query execution at all + scriptPermits.drainPermits(); + assert scriptPermits.availablePermits() == 0; + + scriptPermits.release(numberOfDocs()); + + var request = new EsqlQueryRequestBuilder(client()).query("from test | stats sum(pause_me)") + .pragmas(queryPragmas()) + .async(true) + .waitForCompletionTimeout(TimeValue.timeValueSeconds(60)) + .keepOnCompletion(keepOnCompletion) + .keepAlive(randomKeepAlive()); + + try (var response = request.execute().actionGet(60, TimeUnit.SECONDS)) { + assertThat(response.isRunning(), is(false)); + assertThat(response.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(response).size(), equalTo(1)); + + if (keepOnCompletion) { + assertThat(response.asyncExecutionId(), isPresent()); + // we should be able to retrieve the response by id, since it has been kept + String id = 
response.asyncExecutionId().get(); + var getResultsRequest = new GetAsyncResultRequest(id); + getResultsRequest.setWaitForCompletionTimeout(timeValueSeconds(60)); + var future = client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest); + try (var resp = future.actionGet(60, TimeUnit.SECONDS)) { + assertThat(resp.asyncExecutionId().get(), equalTo(id)); + assertThat(resp.isRunning(), is(false)); + assertThat(resp.columns(), equalTo(List.of(new ColumnInfo("sum(pause_me)", "long")))); + assertThat(getValuesList(resp).size(), equalTo(1)); + } + } else { + assertThat(response.asyncExecutionId(), isEmpty()); + } + } finally { + scriptPermits.drainPermits(); + } + } + + private List<TaskInfo> getEsqlQueryTasks() throws Exception { + List<TaskInfo> foundTasks = new ArrayList<>(); + assertBusy(() -> { + List<TaskInfo> tasks = client().admin() + .cluster() + .prepareListTasks() + .setActions(EsqlQueryAction.NAME + "[a]") + .setDetailed(true) + .get() + .getTasks(); + foundTasks.addAll(tasks); + }); + return foundTasks; + } + + private EsqlQueryResponse sendAsyncQuery() { + scriptPermits.drainPermits(); + assert scriptPermits.availablePermits() == 0; + + scriptPermits.release(between(1, 5)); + var pragmas = queryPragmas(); + return new EsqlQueryRequestBuilder(client()).query("from test | stats sum(pause_me)") + .pragmas(pragmas) + .async(true) + // deliberately small timeout, to frequently trigger incomplete response + .waitForCompletionTimeout(TimeValue.timeValueNanos(1)) + .keepOnCompletion(randomBoolean()) + .keepAlive(randomKeepAlive()) + .execute() + .actionGet(60, TimeUnit.SECONDS); + } + + private QueryPragmas queryPragmas() { + return new QueryPragmas( + Settings.builder() + // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. + .put("data_partitioning", "shard") + // Limit the page size to something small so we do more than one page worth of work, so we get more status updates.
+ .put("page_size", pageSize()) + .build() + ); + } + + private AcknowledgedResponse deleteAsyncId(String id) { + DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id); + return client().execute(DeleteAsyncResultAction.INSTANCE, request).actionGet(timeValueSeconds(60)); + } + + TimeValue randomKeepAlive() { + return TimeValue.parseTimeValue(randomTimeValue(1, 5, "d"), "test"); + } + + public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin { + public LocalStateEsqlAsync(final Settings settings, final Path configPath) { + super(settings, configPath); + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java index e499d3b783bb8..e249504f7e2a1 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionTaskIT.java @@ -12,45 +12,26 @@ import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; import org.elasticsearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.elasticsearch.action.bulk.BulkRequestBuilder; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; import org.elasticsearch.compute.operator.DriverStatus; import org.elasticsearch.compute.operator.DriverTaskRunner; import org.elasticsearch.compute.operator.exchange.ExchangeSinkOperator; import org.elasticsearch.compute.operator.exchange.ExchangeSourceOperator; -import org.elasticsearch.index.engine.SegmentsStats; -import org.elasticsearch.index.mapper.OnScriptError; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.plugins.ScriptPlugin; -import org.elasticsearch.script.LongFieldScript; -import org.elasticsearch.script.ScriptContext; -import org.elasticsearch.script.ScriptEngine; -import org.elasticsearch.search.lookup.SearchLookup; import org.elasticsearch.tasks.TaskCancelledException; import org.elasticsearch.tasks.TaskId; import org.elasticsearch.tasks.TaskInfo; import org.elasticsearch.test.junit.annotations.TestLogging; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.json.JsonXContent; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; import org.junit.Before; -import java.io.IOException; import java.util.ArrayList; -import java.util.Collection; import java.util.List; -import java.util.Map; import java.util.Set; -import java.util.concurrent.Semaphore; -import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.MapMatcher.assertMap; import static org.elasticsearch.test.MapMatcher.matchesMap; @@ -71,83 +52,34 @@ value = "org.elasticsearch.xpack.esql:TRACE,org.elasticsearch.compute:TRACE", reason = "These tests were failing frequently, let's learn as much as we can" ) -public class EsqlActionTaskIT extends AbstractEsqlIntegTestCase { - private static int PAGE_SIZE; - private static int 
NUM_DOCS; +public class EsqlActionTaskIT extends AbstractPausableIntegTestCase { - private static String READ_DESCRIPTION; - private static String MERGE_DESCRIPTION; private static final Logger LOGGER = LogManager.getLogger(EsqlActionTaskIT.class); - @Override - protected Collection> nodePlugins() { - return CollectionUtils.appendToCopy(super.nodePlugins(), PausableFieldPlugin.class); - } + private String READ_DESCRIPTION; + private String MERGE_DESCRIPTION; @Before - public void setupIndex() throws IOException { + public void setup() { assumeTrue("requires query pragmas", canUseQueryPragmas()); - PAGE_SIZE = between(10, 100); - NUM_DOCS = between(4 * PAGE_SIZE, 5 * PAGE_SIZE); READ_DESCRIPTION = """ - \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = PAGE_SIZE, limit = 2147483647] + \\_LuceneSourceOperator[dataPartitioning = SHARD, maxPageSize = pageSize(), limit = 2147483647] \\_ValuesSourceReaderOperator[fields = [pause_me]] \\_AggregationOperator[mode = INITIAL, aggs = sum of longs] - \\_ExchangeSinkOperator""".replace("PAGE_SIZE", Integer.toString(PAGE_SIZE)); + \\_ExchangeSinkOperator""".replace("pageSize()", Integer.toString(pageSize())); MERGE_DESCRIPTION = """ \\_ExchangeSourceOperator[] \\_AggregationOperator[mode = FINAL, aggs = sum of longs] \\_ProjectOperator[projection = [0]] \\_LimitOperator[limit = 500] \\_OutputOperator[columns = [sum(pause_me)]]"""; - - XContentBuilder mapping = JsonXContent.contentBuilder().startObject(); - mapping.startObject("runtime"); - { - mapping.startObject("pause_me"); - { - mapping.field("type", "long"); - mapping.startObject("script").field("source", "").field("lang", "pause").endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - client().admin() - .indices() - .prepareCreate("test") - .setSettings(Map.of("number_of_shards", 1, "number_of_replicas", 0)) - .setMapping(mapping.endObject()) - .get(); - - BulkRequestBuilder bulk = client().prepareBulk().setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - for (int i = 0; i < NUM_DOCS; i++) { - bulk.add(prepareIndex("test").setId(Integer.toString(i)).setSource("foo", i)); - } - bulk.get(); - /* - * forceMerge so we can be sure that we don't bump into tiny - * segments that finish super quickly and cause us to report strange - * statuses when we expect "starting". - */ - client().admin().indices().prepareForceMerge("test").setMaxNumSegments(1).get(); - /* - * Double super extra paranoid check that force merge worked. It's - * failed to reduce the index to a single segment and caused this test - * to fail in very difficult to debug ways. If it fails again, it'll - * trip here. Or maybe it won't! And we'll learn something. Maybe - * it's ghosts. 
- */ - SegmentsStats stats = client().admin().indices().prepareStats("test").get().getPrimaries().getSegments(); - if (stats.getCount() != 1L) { - fail(Strings.toString(stats)); - } } public void testTaskContents() throws Exception { ActionFuture response = startEsql(); try { getTasksStarting(); - scriptPermits.release(PAGE_SIZE); + scriptPermits.release(pageSize()); List foundTasks = getTasksRunning(); int luceneSources = 0; int valuesSourceReaders = 0; @@ -158,9 +90,11 @@ public void testTaskContents() throws Exception { assertThat(status.sessionId(), not(emptyOrNullString())); for (DriverStatus.OperatorStatus o : status.activeOperators()) { logger.info("status {}", o); - if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + PAGE_SIZE)) { + if (o.operator().startsWith("LuceneSourceOperator[maxPageSize=" + pageSize())) { LuceneSourceOperator.Status oStatus = (LuceneSourceOperator.Status) o.status(); assertThat(oStatus.processedSlices(), lessThanOrEqualTo(oStatus.totalSlices())); + assertThat(oStatus.processedQueries(), equalTo(Set.of("*:*"))); + assertThat(oStatus.processedShards(), equalTo(Set.of("test:0"))); assertThat(oStatus.sliceIndex(), lessThanOrEqualTo(oStatus.totalSlices())); assertThat(oStatus.sliceMin(), greaterThanOrEqualTo(0)); assertThat(oStatus.sliceMax(), greaterThanOrEqualTo(oStatus.sliceMin())); @@ -204,9 +138,9 @@ public void testTaskContents() throws Exception { assertThat(exchangeSinks, greaterThanOrEqualTo(1)); assertThat(exchangeSources, equalTo(1)); } finally { - scriptPermits.release(NUM_DOCS); + scriptPermits.release(numberOfDocs()); try (EsqlQueryResponse esqlResponse = response.get()) { - assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo((long) NUM_DOCS)); + assertThat(Iterators.flatMap(esqlResponse.values(), i -> i).next(), equalTo((long) numberOfDocs())); } } } @@ -219,7 +153,7 @@ public void testCancelRead() throws Exception { cancelTask(running.taskId()); assertCancelled(response); } finally { - scriptPermits.release(NUM_DOCS); + scriptPermits.release(numberOfDocs()); } } @@ -231,7 +165,7 @@ public void testCancelMerge() throws Exception { cancelTask(running.taskId()); assertCancelled(response); } finally { - scriptPermits.release(NUM_DOCS); + scriptPermits.release(numberOfDocs()); } } @@ -249,7 +183,7 @@ public void testCancelEsqlTask() throws Exception { cancelTask(tasks.get(0).taskId()); assertCancelled(response); } finally { - scriptPermits.release(NUM_DOCS); + scriptPermits.release(numberOfDocs()); } } @@ -261,7 +195,7 @@ private ActionFuture startEsql() { // Force shard partitioning because that's all the tests know how to match. It is easier to reason about too. .put("data_partitioning", "shard") // Limit the page size to something small so we do more than one page worth of work, so we get more status updates. 
- .put("page_size", PAGE_SIZE) + .put("page_size", pageSize()) // Report the status after every action .put("status_interval", "0ms") .build() @@ -274,7 +208,7 @@ private void cancelTask(TaskId taskId) { request.setWaitForCompletion(false); LOGGER.debug("--> cancelling task [{}] without waiting for completion", taskId); client().admin().cluster().execute(CancelTasksAction.INSTANCE, request).actionGet(); - scriptPermits.release(NUM_DOCS); + scriptPermits.release(numberOfDocs()); request = new CancelTasksRequest().setTargetTaskId(taskId).setReason("test cancel"); request.setWaitForCompletion(true); LOGGER.debug("--> cancelling task [{}] with waiting for completion", taskId); @@ -367,56 +301,4 @@ private void assertCancelled(ActionFuture response) throws Ex ) ); } - - private static final Semaphore scriptPermits = new Semaphore(0); - - public static class PausableFieldPlugin extends Plugin implements ScriptPlugin { - @Override - public ScriptEngine getScriptEngine(Settings settings, Collection> contexts) { - return new ScriptEngine() { - @Override - public String getType() { - return "pause"; - } - - @Override - @SuppressWarnings("unchecked") - public FactoryType compile( - String name, - String code, - ScriptContext context, - Map params - ) { - return (FactoryType) new LongFieldScript.Factory() { - @Override - public LongFieldScript.LeafFactory newFactory( - String fieldName, - Map params, - SearchLookup searchLookup, - OnScriptError onScriptError - ) { - return ctx -> new LongFieldScript(fieldName, params, searchLookup, onScriptError, ctx) { - @Override - public void execute() { - try { - assertTrue(scriptPermits.tryAcquire(1, TimeUnit.MINUTES)); - } catch (Exception e) { - throw new AssertionError(e); - } - LOGGER.debug("--> emitting value"); - emit(1); - } - }; - } - }; - } - - @Override - public Set> getSupportedContexts() { - return Set.of(LongFieldScript.CONTEXT); - } - }; - } - } - } diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java new file mode 100644 index 0000000000000..b2685ac2c8276 --- /dev/null +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlAsyncActionIT.java @@ -0,0 +1,120 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.ElasticsearchTimeoutException; +import org.elasticsearch.ResourceNotFoundException; +import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.xpack.core.LocalStateCompositeXPackPlugin; +import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction; +import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest; +import org.elasticsearch.xpack.core.async.GetAsyncResultRequest; +import org.elasticsearch.xpack.esql.plugin.QueryPragmas; + +import java.nio.file.Path; +import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; +import java.util.concurrent.TimeUnit; + +import static org.elasticsearch.core.TimeValue.timeValueSeconds; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.core.IsEqual.equalTo; + +/** + * Runs test scenarios from EsqlActionIT, with an extra level of indirection + * through the async query and async get APIs. + */ +public class EsqlAsyncActionIT extends EsqlActionIT { + + @Override + protected Collection> nodePlugins() { + ArrayList> actions = new ArrayList<>(super.nodePlugins()); + actions.add(LocalStateEsqlAsync.class); + return Collections.unmodifiableList(actions); + } + + @Override + protected EsqlQueryResponse run(String esqlCommands, QueryPragmas pragmas, QueryBuilder filter) { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query(esqlCommands); + request.pragmas(pragmas); + request.async(true); + // deliberately small timeout, to frequently trigger incomplete response + request.waitForCompletionTimeout(TimeValue.timeValueNanos(1)); + request.keepOnCompletion(randomBoolean()); + if (filter != null) { + request.filter(filter); + } + + var response = run(request); + if (response.asyncExecutionId().isPresent()) { + String id = response.asyncExecutionId().get(); + assertThat(response.isRunning(), is(true)); + assertThat(response.columns(), is(empty())); // no partial results + assertThat(response.pages(), is(empty())); + response.close(); + var getResponse = getAsyncResponse(id); + assertDeletable(id); + return getResponse; + } else { + return response; + } + } + + void assertDeletable(String id) { + var resp = deleteAsyncId(id); + assertTrue(resp.isAcknowledged()); + // the stored response should no longer be retrievable + var e = expectThrows(ResourceNotFoundException.class, () -> getAsyncResponse(id)); + assertThat(e.getMessage(), equalTo(id)); + } + + EsqlQueryResponse getAsyncResponse(String id) { + try { + var getResultsRequest = new GetAsyncResultRequest(id).setWaitForCompletionTimeout(timeValueSeconds(60)); + return client().execute(EsqlAsyncGetResultAction.INSTANCE, getResultsRequest).actionGet(30, TimeUnit.SECONDS); + } catch (ElasticsearchTimeoutException e) { + throw new AssertionError("timeout", e); + } + } + + AcknowledgedResponse deleteAsyncId(String id) { + try { + DeleteAsyncResultRequest request = new DeleteAsyncResultRequest(id); + return client().execute(DeleteAsyncResultAction.INSTANCE, request).actionGet(30, TimeUnit.SECONDS); + } catch (ElasticsearchTimeoutException e) { + throw new AssertionError("timeout", e); + } + } + + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102455") + // 
junit.framework.AssertionFailedError: Unexpected exception type, expected VerificationException but got
+    // org.elasticsearch.common.io.stream.NotSerializableExceptionWrapper: verification_exception: Found 1 problem
+    @Override
+    public void testOverlappingIndexPatterns() throws Exception {
+        super.testOverlappingIndexPatterns();
+    }
+
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/102455")
+    @Override
+    public void testIndexPatterns() throws Exception {
+        super.testIndexPatterns();
+    }
+
+    public static class LocalStateEsqlAsync extends LocalStateCompositeXPackPlugin {
+        public LocalStateEsqlAsync(final Settings settings, final Path configPath) {
+            super(settings, configPath);
+        }
+    }
+}
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java
index 7828ba97ed62b..a39439d33bfba 100644
--- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/ManyShardsIT.java
@@ -60,8 +60,10 @@ public void testConcurrentQueries() throws Exception {
                     throw new AssertionError(e);
                 }
                 final var pragmas = Settings.builder();
-                if (canUseQueryPragmas()) {
-                    pragmas.put(randomPragmas().getSettings()).put("exchange_concurrent_clients", between(1, 2));
+                if (randomBoolean() && canUseQueryPragmas()) {
+                    pragmas.put(randomPragmas().getSettings())
+                        .put("task_concurrency", between(1, 2))
+                        .put("exchange_concurrent_clients", between(1, 2));
                 }
                 run("from test-* | stats count(user) by tags", new QueryPragmas(pragmas.build())).close();
             });
diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java
new file mode 100644
index 0000000000000..a1fbee17ef8ec
--- /dev/null
+++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/TimeBasedIndicesIT.java
@@ -0,0 +1,58 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.bulk.BulkRequestBuilder; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.index.query.RangeQueryBuilder; + +import java.util.List; + +import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.getValuesList; +import static org.hamcrest.Matchers.hasSize; + +public class TimeBasedIndicesIT extends AbstractEsqlIntegTestCase { + + public void testFilter() { + long epoch = System.currentTimeMillis(); + assertAcked(client().admin().indices().prepareCreate("test").setMapping("@timestamp", "type=date", "value", "type=long")); + BulkRequestBuilder bulk = client().prepareBulk("test").setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + int oldDocs = between(10, 100); + for (int i = 0; i < oldDocs; i++) { + long timestamp = epoch - TimeValue.timeValueHours(between(1, 2)).millis(); + bulk.add(new IndexRequest().source("@timestamp", timestamp, "value", -i)); + } + int newDocs = between(10, 100); + for (int i = 0; i < newDocs; i++) { + long timestamp = epoch + TimeValue.timeValueHours(between(1, 2)).millis(); + bulk.add(new IndexRequest().source("@timestamp", timestamp, "value", i)); + } + bulk.get(); + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM test | limit 1000"); + request.filter(new RangeQueryBuilder("@timestamp").from(epoch - TimeValue.timeValueHours(3).millis()).to("now")); + try (var resp = run(request)) { + List> values = getValuesList(resp); + assertThat(values, hasSize(oldDocs)); + } + } + { + EsqlQueryRequest request = new EsqlQueryRequest(); + request.query("FROM test | limit 1000"); + request.filter(new RangeQueryBuilder("@timestamp").from("now").to(epoch + TimeValue.timeValueHours(3).millis())); + try (var resp = run(request)) { + List> values = getValuesList(resp); + assertThat(values, hasSize(newDocs)); + } + } + } +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java index 12897979a47e0..fb6d23695f837 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/WarningsIT.java @@ -8,23 +8,21 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.junit.annotations.TestLogging; import org.elasticsearch.transport.TransportService; import java.util.List; import java.util.Map; +import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -@TestLogging(value = "org.elasticsearch.xpack.esql:TRACE", reason = "debug") public class WarningsIT extends AbstractEsqlIntegTestCase { - public void testCollectWarnings() { + public void testCollectWarnings() throws Exception { final String node1, node2; if (randomBoolean()) { internalCluster().ensureAtLeastNumDataNodes(2); @@ -64,19 
+62,23 @@ public void testCollectWarnings() {
         EsqlQueryRequest request = new EsqlQueryRequest();
         request.query("FROM index-* | EVAL ip = to_ip(host) | STATS s = COUNT(*) by ip | KEEP ip | LIMIT 100");
         request.pragmas(randomPragmas());
-        PlainActionFuture<EsqlQueryResponse> future = new PlainActionFuture<>();
-        client(coordinator.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.runBefore(future, () -> {
-            var threadpool = internalCluster().getInstance(TransportService.class, coordinator.getName()).getThreadPool();
-            Map<String, List<String>> responseHeaders = threadpool.getThreadContext().getResponseHeaders();
-            List<String> warnings = responseHeaders.getOrDefault("Warning", List.of())
-                .stream()
-                .filter(w -> w.contains("is not an IP string literal"))
-                .toList();
-            int expectedWarnings = Math.min(20, numDocs1 + numDocs2);
-            // we cap the number of warnings per node
-            assertThat(warnings.size(), greaterThanOrEqualTo(expectedWarnings));
+        CountDownLatch latch = new CountDownLatch(1);
+        client(coordinator.getName()).execute(EsqlQueryAction.INSTANCE, request, ActionListener.running(() -> {
+            try {
+                var threadpool = internalCluster().getInstance(TransportService.class, coordinator.getName()).getThreadPool();
+                Map<String, List<String>> responseHeaders = threadpool.getThreadContext().getResponseHeaders();
+                List<String> warnings = responseHeaders.getOrDefault("Warning", List.of())
+                    .stream()
+                    .filter(w -> w.contains("is not an IP string literal"))
+                    .toList();
+                int expectedWarnings = Math.min(20, numDocs1 + numDocs2);
+                // we cap the number of warnings per node
+                assertThat(warnings.size(), greaterThanOrEqualTo(expectedWarnings));
+            } finally {
+                latch.countDown();
+            }
         }));
-        future.actionGet(30, TimeUnit.SECONDS).close();
+        assertTrue(latch.await(30, TimeUnit.SECONDS));
     }

     private DiscoveryNode randomDataNode() {
diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
index 747c1fdcd1921..dbaefa2e5aebf 100644
--- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
+++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.g4
@@ -1,24 +1,24 @@
 lexer grammar EsqlBaseLexer;
-DISSECT : 'dissect' -> pushMode(EXPRESSION);
-DROP : 'drop' -> pushMode(SOURCE_IDENTIFIERS);
-ENRICH : 'enrich' -> pushMode(SOURCE_IDENTIFIERS);
-EVAL : 'eval' -> pushMode(EXPRESSION);
-EXPLAIN : 'explain' -> pushMode(EXPLAIN_MODE);
-FROM : 'from' -> pushMode(SOURCE_IDENTIFIERS);
-GROK : 'grok' -> pushMode(EXPRESSION);
-INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION);
-KEEP : 'keep' -> pushMode(SOURCE_IDENTIFIERS);
-LIMIT : 'limit' -> pushMode(EXPRESSION);
-MV_EXPAND : 'mv_expand' -> pushMode(SOURCE_IDENTIFIERS);
-PROJECT : 'project' -> pushMode(SOURCE_IDENTIFIERS);
-RENAME : 'rename' -> pushMode(SOURCE_IDENTIFIERS);
-ROW : 'row' -> pushMode(EXPRESSION);
-SHOW : 'show' -> pushMode(EXPRESSION);
-SORT : 'sort' -> pushMode(EXPRESSION);
-STATS : 'stats' -> pushMode(EXPRESSION);
-WHERE : 'where' -> pushMode(EXPRESSION);
-UNKNOWN_CMD : ~[ \r\n\t[\]/]+ -> pushMode(EXPRESSION);
+DISSECT : 'dissect' -> pushMode(EXPRESSION_MODE);
+DROP : 'drop' -> pushMode(PROJECT_MODE);
+ENRICH : 'enrich' -> pushMode(ENRICH_MODE);
+EVAL : 'eval' -> pushMode(EXPRESSION_MODE);
+EXPLAIN : 'explain' -> pushMode(EXPLAIN_MODE);
+FROM : 'from' -> pushMode(FROM_MODE);
+GROK : 'grok' -> pushMode(EXPRESSION_MODE);
+INLINESTATS : 'inlinestats' -> pushMode(EXPRESSION_MODE);
+KEEP : 'keep' -> pushMode(PROJECT_MODE);
+LIMIT : 'limit' -> pushMode(EXPRESSION_MODE);
+MV_EXPAND : 'mv_expand' -> pushMode(MVEXPAND_MODE);
+PROJECT : 'project' -> pushMode(PROJECT_MODE);
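+// Informative sketch (not a rule): each command keyword above and below
+// switches the lexer into a mode tailored to that command. For
+//   from logs-* | keep a*, b
+// 'from' enters FROM_MODE, where logs-* lexes as FROM_UNQUOTED_IDENTIFIER;
+// the PIPE pops back to the default mode; 'keep' then enters PROJECT_MODE,
+// where a* lexes as a PROJECT_UNQUOTED_IDENTIFIER pattern.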
+RENAME : 'rename' -> pushMode(RENAME_MODE);
+ROW : 'row' -> pushMode(EXPRESSION_MODE);
+SHOW : 'show' -> pushMode(SHOW_MODE);
+SORT : 'sort' -> pushMode(EXPRESSION_MODE);
+STATS : 'stats' -> pushMode(EXPRESSION_MODE);
+WHERE : 'where' -> pushMode(EXPRESSION_MODE);
+UNKNOWN_CMD : ~[ \r\n\t[\]/]+ -> pushMode(EXPRESSION_MODE);

 LINE_COMMENT
     : '//' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN)
@@ -31,16 +31,20 @@ MULTILINE_COMMENT
 WS
     : [ \r\n\t]+ -> channel(HIDDEN)
     ;
-
-
+//
+// Explain
+//
 mode EXPLAIN_MODE;
-EXPLAIN_OPENING_BRACKET : '[' -> type(OPENING_BRACKET), pushMode(DEFAULT_MODE);
-EXPLAIN_PIPE : '|' -> type(PIPE), popMode;
+EXPLAIN_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET), pushMode(DEFAULT_MODE);
+EXPLAIN_PIPE : PIPE -> type(PIPE), popMode;

 EXPLAIN_WS : WS -> channel(HIDDEN);
 EXPLAIN_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN);
 EXPLAIN_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN);

-mode EXPRESSION;
+//
+// Expression - used by most commands
+//
+mode EXPRESSION_MODE;

 PIPE : '|' -> popMode;
@@ -64,6 +68,27 @@ fragment EXPONENT
     : [Ee] [+-]? DIGIT+
     ;

+fragment ASPERAND
+    : '@'
+    ;
+
+fragment BACKQUOTE
+    : '`'
+    ;
+
+fragment BACKQUOTE_BLOCK
+    : ~'`'
+    | '``'
+    ;
+
+fragment UNDERSCORE
+    : '_'
+    ;
+
+fragment UNQUOTED_ID_BODY
+    : (LETTER | DIGIT | UNDERSCORE)
+    ;
+
 STRING
     : '"' (ESCAPE_SEQUENCE | UNESCAPED_CHARS)* '"'
     | '"""' (~[\r\n])*? '"""' '"'? '"'?
@@ -103,8 +128,6 @@ PARAM: '?';
 RLIKE: 'rlike';
 RP : ')';
 TRUE : 'true';
-INFO : 'info';
-FUNCTIONS : 'functions';

 EQ : '==';
 NEQ : '!=';
@@ -124,19 +147,18 @@ PERCENT : '%';
 // mode. Thus, the two popModes on CLOSING_BRACKET. The other way could be as
 // the start of a multivalued field constant. To line up with the double pop
 // the explain mode needs, we double push when we see that.
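+// For example, a multivalued constant like [1, 2, 3] inside an expression
+// pushes EXPRESSION_MODE twice on '[', so the double pop on ']' always lands
+// back in the mode that was active before the opening bracket.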
-OPENING_BRACKET : '[' -> pushMode(EXPRESSION), pushMode(EXPRESSION); +OPENING_BRACKET : '[' -> pushMode(EXPRESSION_MODE), pushMode(EXPRESSION_MODE); CLOSING_BRACKET : ']' -> popMode, popMode; - UNQUOTED_IDENTIFIER - : LETTER (LETTER | DIGIT | '_')* + : LETTER UNQUOTED_ID_BODY* // only allow @ at beginning of identifier to keep the option to allow @ as infix operator in the future // also, single `_` and `@` characters are not valid identifiers - | ('_' | '@') (LETTER | DIGIT | '_')+ + | (UNDERSCORE | ASPERAND) UNQUOTED_ID_BODY+ ; QUOTED_IDENTIFIER - : '`' ( ~'`' | '``' )* '`' + : BACKQUOTE BACKQUOTE_BLOCK+ BACKQUOTE ; EXPR_LINE_COMMENT @@ -150,42 +172,204 @@ EXPR_MULTILINE_COMMENT EXPR_WS : WS -> channel(HIDDEN) ; +// +// FROM command +// +mode FROM_MODE; +FROM_PIPE : PIPE -> type(PIPE), popMode; +FROM_OPENING_BRACKET : OPENING_BRACKET -> type(OPENING_BRACKET), pushMode(FROM_MODE), pushMode(FROM_MODE); +FROM_CLOSING_BRACKET : CLOSING_BRACKET -> type(CLOSING_BRACKET), popMode, popMode; +FROM_COMMA : COMMA -> type(COMMA); +FROM_ASSIGN : ASSIGN -> type(ASSIGN); +METADATA: 'metadata'; +fragment FROM_UNQUOTED_IDENTIFIER_PART + : ~[=`|,[\]/ \t\r\n] + | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment + ; -mode SOURCE_IDENTIFIERS; +FROM_UNQUOTED_IDENTIFIER + : FROM_UNQUOTED_IDENTIFIER_PART+ + ; + +FROM_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) + ; + +FROM_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +FROM_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +FROM_WS + : WS -> channel(HIDDEN) + ; +// +// DROP, KEEP, PROJECT +// +mode PROJECT_MODE; +PROJECT_PIPE : PIPE -> type(PIPE), popMode; +PROJECT_DOT: DOT -> type(DOT); +PROJECT_COMMA : COMMA -> type(COMMA); + +fragment UNQUOTED_ID_BODY_WITH_PATTERN + : (LETTER | DIGIT | UNDERSCORE | ASTERISK) + ; + +PROJECT_UNQUOTED_IDENTIFIER + : (LETTER | ASTERISK) UNQUOTED_ID_BODY_WITH_PATTERN* + | (UNDERSCORE | ASPERAND) UNQUOTED_ID_BODY_WITH_PATTERN+ + ; + +PROJECT_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) + ; + +PROJECT_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +PROJECT_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +PROJECT_WS + : WS -> channel(HIDDEN) + ; +// +// | RENAME a.b AS x, c AS y +// +mode RENAME_MODE; +RENAME_PIPE : PIPE -> type(PIPE), popMode; +RENAME_ASSIGN : ASSIGN -> type(ASSIGN); +RENAME_COMMA : COMMA -> type(COMMA); +RENAME_DOT: DOT -> type(DOT); -SRC_PIPE : '|' -> type(PIPE), popMode; -SRC_OPENING_BRACKET : '[' -> type(OPENING_BRACKET), pushMode(SOURCE_IDENTIFIERS), pushMode(SOURCE_IDENTIFIERS); -SRC_CLOSING_BRACKET : ']' -> popMode, popMode, type(CLOSING_BRACKET); -SRC_COMMA : ',' -> type(COMMA); -SRC_ASSIGN : '=' -> type(ASSIGN); AS : 'as'; -METADATA: 'metadata'; -ON : 'on'; -WITH : 'with'; -SRC_UNQUOTED_IDENTIFIER - : SRC_UNQUOTED_IDENTIFIER_PART+ +RENAME_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) ; -fragment SRC_UNQUOTED_IDENTIFIER_PART - : ~[=`|,[\]/ \t\r\n]+ - | '/' ~[*/] // allow single / but not followed by another / or * which would start a comment +// use the unquoted pattern to let the parser invalidate fields with * +RENAME_UNQUOTED_IDENTIFIER + : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + ; + +RENAME_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) ; -SRC_QUOTED_IDENTIFIER - : QUOTED_IDENTIFIER +RENAME_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) ; -SRC_LINE_COMMENT +RENAME_WS + : WS -> channel(HIDDEN) + ; + +// | 
ENRICH ON key WITH fields +mode ENRICH_MODE; +ENRICH_PIPE : PIPE -> type(PIPE), popMode; + +ON : 'on' -> pushMode(ENRICH_FIELD_MODE); +WITH : 'with' -> pushMode(ENRICH_FIELD_MODE); + +// use the unquoted pattern to let the parser invalidate fields with * +ENRICH_POLICY_UNQUOTED_IDENTIFIER + : FROM_UNQUOTED_IDENTIFIER -> type(FROM_UNQUOTED_IDENTIFIER) + ; + +ENRICH_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) + ; + +ENRICH_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +ENRICH_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +ENRICH_WS + : WS -> channel(HIDDEN) + ; + +// submode for Enrich to allow different lexing between policy identifier (loose) and field identifiers +mode ENRICH_FIELD_MODE; +ENRICH_FIELD_PIPE : PIPE -> type(PIPE), popMode, popMode; +ENRICH_FIELD_ASSIGN : ASSIGN -> type(ASSIGN); +ENRICH_FIELD_COMMA : COMMA -> type(COMMA); +ENRICH_FIELD_DOT: DOT -> type(DOT); + +ENRICH_FIELD_WITH : WITH -> type(WITH) ; + +ENRICH_FIELD_UNQUOTED_IDENTIFIER + : PROJECT_UNQUOTED_IDENTIFIER -> type(PROJECT_UNQUOTED_IDENTIFIER) + ; + +ENRICH_FIELD_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) + ; + +ENRICH_FIELD_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +ENRICH_FIELD_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +ENRICH_FIELD_WS + : WS -> channel(HIDDEN) + ; + +mode MVEXPAND_MODE; +MVEXPAND_PIPE : PIPE -> type(PIPE), popMode; +MVEXPAND_DOT: DOT -> type(DOT); + +MVEXPAND_QUOTED_IDENTIFIER + : QUOTED_IDENTIFIER -> type(QUOTED_IDENTIFIER) + ; + +MVEXPAND_UNQUOTED_IDENTIFIER + : UNQUOTED_IDENTIFIER -> type(UNQUOTED_IDENTIFIER) + ; + +MVEXPAND_LINE_COMMENT + : LINE_COMMENT -> channel(HIDDEN) + ; + +MVEXPAND_MULTILINE_COMMENT + : MULTILINE_COMMENT -> channel(HIDDEN) + ; + +MVEXPAND_WS + : WS -> channel(HIDDEN) + ; + +// +// SHOW INFO +// +mode SHOW_MODE; +SHOW_PIPE : PIPE -> type(PIPE), popMode; + +INFO : 'info'; +FUNCTIONS : 'functions'; + +SHOW_LINE_COMMENT : LINE_COMMENT -> channel(HIDDEN) ; -SRC_MULTILINE_COMMENT +SHOW_MULTILINE_COMMENT : MULTILINE_COMMENT -> channel(HIDDEN) ; -SRC_WS +SHOW_WS : WS -> channel(HIDDEN) ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens deleted file mode 100644 index d8761f5eb0d73..0000000000000 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseLexer.tokens +++ /dev/null @@ -1,137 +0,0 @@ -DISSECT=1 -DROP=2 -ENRICH=3 -EVAL=4 -EXPLAIN=5 -FROM=6 -GROK=7 -INLINESTATS=8 -KEEP=9 -LIMIT=10 -MV_EXPAND=11 -PROJECT=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -INFO=52 -FUNCTIONS=53 -EQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -AS=72 -METADATA=73 -ON=74 -WITH=75 -SRC_UNQUOTED_IDENTIFIER=76 -SRC_QUOTED_IDENTIFIER=77 -SRC_LINE_COMMENT=78 -SRC_MULTILINE_COMMENT=79 -SRC_WS=80 -EXPLAIN_PIPE=81 -'dissect'=1 -'drop'=2 -'enrich'=3 -'eval'=4 -'explain'=5 -'from'=6 -'grok'=7 -'inlinestats'=8 -'keep'=9 -'limit'=10 -'mv_expand'=11 
-'project'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'by'=30 -'and'=31 -'asc'=32 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'info'=52 -'functions'=53 -'=='=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'as'=72 -'metadata'=73 -'on'=74 -'with'=75 diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 index 044e920744375..cdf0cea58b230 100644 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 +++ b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.g4 @@ -98,11 +98,11 @@ field ; fromCommand - : FROM sourceIdentifier (COMMA sourceIdentifier)* metadata? + : FROM fromIdentifier (COMMA fromIdentifier)* metadata? ; metadata - : OPENING_BRACKET METADATA sourceIdentifier (COMMA sourceIdentifier)* CLOSING_BRACKET + : OPENING_BRACKET METADATA fromIdentifier (COMMA fromIdentifier)* CLOSING_BRACKET ; @@ -122,21 +122,29 @@ grouping : qualifiedName (COMMA qualifiedName)* ; -sourceIdentifier - : SRC_UNQUOTED_IDENTIFIER - | SRC_QUOTED_IDENTIFIER +fromIdentifier + : FROM_UNQUOTED_IDENTIFIER + | QUOTED_IDENTIFIER ; qualifiedName : identifier (DOT identifier)* ; +qualifiedNamePattern + : identifierPattern (DOT identifierPattern)* + ; identifier : UNQUOTED_IDENTIFIER | QUOTED_IDENTIFIER ; +identifierPattern + : PROJECT_UNQUOTED_IDENTIFIER + | QUOTED_IDENTIFIER + ; + constant : NULL #nullLiteral | integerValue UNQUOTED_IDENTIFIER #qualifiedIntegerLiteral @@ -163,12 +171,12 @@ orderExpression ; keepCommand - : KEEP sourceIdentifier (COMMA sourceIdentifier)* - | PROJECT sourceIdentifier (COMMA sourceIdentifier)* + : KEEP qualifiedNamePattern (COMMA qualifiedNamePattern)* + | PROJECT qualifiedNamePattern (COMMA qualifiedNamePattern)* ; dropCommand - : DROP sourceIdentifier (COMMA sourceIdentifier)* + : DROP qualifiedNamePattern (COMMA qualifiedNamePattern)* ; renameCommand @@ -176,7 +184,7 @@ renameCommand ; renameClause: - oldName=sourceIdentifier AS newName=sourceIdentifier + oldName=qualifiedNamePattern AS newName=qualifiedNamePattern ; dissectCommand @@ -188,7 +196,7 @@ grokCommand ; mvExpandCommand - : MV_EXPAND sourceIdentifier + : MV_EXPAND qualifiedName ; commandOptions @@ -238,9 +246,9 @@ showCommand ; enrichCommand - : ENRICH policyName=sourceIdentifier (ON matchField=sourceIdentifier)? (WITH enrichWithClause (COMMA enrichWithClause)*)? + : ENRICH policyName=fromIdentifier (ON matchField=qualifiedNamePattern)? (WITH enrichWithClause (COMMA enrichWithClause)*)? ; enrichWithClause - : (newName=sourceIdentifier ASSIGN)? enrichField=sourceIdentifier + : (newName=qualifiedNamePattern ASSIGN)? 
enrichField=qualifiedNamePattern ; diff --git a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens b/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens deleted file mode 100644 index d8761f5eb0d73..0000000000000 --- a/x-pack/plugin/esql/src/main/antlr/EsqlBaseParser.tokens +++ /dev/null @@ -1,137 +0,0 @@ -DISSECT=1 -DROP=2 -ENRICH=3 -EVAL=4 -EXPLAIN=5 -FROM=6 -GROK=7 -INLINESTATS=8 -KEEP=9 -LIMIT=10 -MV_EXPAND=11 -PROJECT=12 -RENAME=13 -ROW=14 -SHOW=15 -SORT=16 -STATS=17 -WHERE=18 -UNKNOWN_CMD=19 -LINE_COMMENT=20 -MULTILINE_COMMENT=21 -WS=22 -EXPLAIN_WS=23 -EXPLAIN_LINE_COMMENT=24 -EXPLAIN_MULTILINE_COMMENT=25 -PIPE=26 -STRING=27 -INTEGER_LITERAL=28 -DECIMAL_LITERAL=29 -BY=30 -AND=31 -ASC=32 -ASSIGN=33 -COMMA=34 -DESC=35 -DOT=36 -FALSE=37 -FIRST=38 -LAST=39 -LP=40 -IN=41 -IS=42 -LIKE=43 -NOT=44 -NULL=45 -NULLS=46 -OR=47 -PARAM=48 -RLIKE=49 -RP=50 -TRUE=51 -INFO=52 -FUNCTIONS=53 -EQ=54 -NEQ=55 -LT=56 -LTE=57 -GT=58 -GTE=59 -PLUS=60 -MINUS=61 -ASTERISK=62 -SLASH=63 -PERCENT=64 -OPENING_BRACKET=65 -CLOSING_BRACKET=66 -UNQUOTED_IDENTIFIER=67 -QUOTED_IDENTIFIER=68 -EXPR_LINE_COMMENT=69 -EXPR_MULTILINE_COMMENT=70 -EXPR_WS=71 -AS=72 -METADATA=73 -ON=74 -WITH=75 -SRC_UNQUOTED_IDENTIFIER=76 -SRC_QUOTED_IDENTIFIER=77 -SRC_LINE_COMMENT=78 -SRC_MULTILINE_COMMENT=79 -SRC_WS=80 -EXPLAIN_PIPE=81 -'dissect'=1 -'drop'=2 -'enrich'=3 -'eval'=4 -'explain'=5 -'from'=6 -'grok'=7 -'inlinestats'=8 -'keep'=9 -'limit'=10 -'mv_expand'=11 -'project'=12 -'rename'=13 -'row'=14 -'show'=15 -'sort'=16 -'stats'=17 -'where'=18 -'by'=30 -'and'=31 -'asc'=32 -'desc'=35 -'.'=36 -'false'=37 -'first'=38 -'last'=39 -'('=40 -'in'=41 -'is'=42 -'like'=43 -'not'=44 -'null'=45 -'nulls'=46 -'or'=47 -'?'=48 -'rlike'=49 -')'=50 -'true'=51 -'info'=52 -'functions'=53 -'=='=54 -'!='=55 -'<'=56 -'<='=57 -'>'=58 -'>='=59 -'+'=60 -'-'=61 -'*'=62 -'/'=63 -'%'=64 -']'=66 -'as'=72 -'metadata'=73 -'on'=74 -'with'=75 diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java new file mode 100644 index 0000000000000..1603dd8fd3746 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlAsyncGetResultAction.java @@ -0,0 +1,21 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.action.ActionType; + +public class EsqlAsyncGetResultAction extends ActionType { + + public static final EsqlAsyncGetResultAction INSTANCE = new EsqlAsyncGetResultAction(); + + public static final String NAME = "indices:data/read/esql/async/get"; + + private EsqlAsyncGetResultAction() { + super(NAME, in -> { throw new IllegalArgumentException("can't transport EsqlAsyncGetResultAction"); }); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java index 0de89a4d8de2a..5e8c5c27edd23 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequest.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.AbstractQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.tasks.CancellableTask; @@ -43,6 +44,9 @@ public class EsqlQueryRequest extends ActionRequest implements CompositeIndicesRequest { + public static TimeValue DEFAULT_KEEP_ALIVE = TimeValue.timeValueDays(5); + public static TimeValue DEFAULT_WAIT_FOR_COMPLETION = TimeValue.timeValueSeconds(1); + private static final ConstructingObjectParser PARAM_PARSER = new ConstructingObjectParser<>( "params", true, @@ -64,7 +68,14 @@ public class EsqlQueryRequest extends ActionRequest implements CompositeIndicesR private static final ParseField LOCALE_FIELD = new ParseField("locale"); private static final ParseField PROFILE_FIELD = new ParseField("profile"); - private static final ObjectParser PARSER = objectParser(EsqlQueryRequest::new); + static final ParseField WAIT_FOR_COMPLETION_TIMEOUT = new ParseField("wait_for_completion_timeout"); + static final ParseField KEEP_ALIVE = new ParseField("keep_alive"); + static final ParseField KEEP_ON_COMPLETION = new ParseField("keep_on_completion"); + + private static final ObjectParser SYNC_PARSER = objectParserSync(EsqlQueryRequest::syncEsqlQueryRequest); + private static final ObjectParser ASYNC_PARSER = objectParserAsync(EsqlQueryRequest::asyncEsqlQueryRequest); + + private boolean async; private String query; private boolean columnar; @@ -73,6 +84,21 @@ public class EsqlQueryRequest extends ActionRequest implements CompositeIndicesR private QueryBuilder filter; private QueryPragmas pragmas = new QueryPragmas(Settings.EMPTY); private List params = List.of(); + private TimeValue waitForCompletionTimeout = DEFAULT_WAIT_FOR_COMPLETION; + private TimeValue keepAlive = DEFAULT_KEEP_ALIVE; + private boolean keepOnCompletion; + + private static EsqlQueryRequest syncEsqlQueryRequest() { + return new EsqlQueryRequest(false); + } + + private static EsqlQueryRequest asyncEsqlQueryRequest() { + return new EsqlQueryRequest(true); + } + + private EsqlQueryRequest(boolean async) { + this.async = async; + } public EsqlQueryRequest(StreamInput in) throws IOException { super(in); @@ -100,6 +126,14 @@ public String query() { return query; } + public void async(boolean async) { + this.async = async; + } + + public boolean async() { + return async; + } + public void columnar(boolean columnar) { this.columnar = columnar; } @@ -155,12 +189,39 @@ 
public void params(List params) { this.params = params; } - public static EsqlQueryRequest fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); + public TimeValue waitForCompletionTimeout() { + return waitForCompletionTimeout; } - private static ObjectParser objectParser(Supplier supplier) { - ObjectParser parser = new ObjectParser<>("esql/query", false, supplier); + public void waitForCompletionTimeout(TimeValue waitForCompletionTimeout) { + this.waitForCompletionTimeout = waitForCompletionTimeout; + } + + public TimeValue keepAlive() { + return keepAlive; + } + + public void keepAlive(TimeValue keepAlive) { + this.keepAlive = keepAlive; + } + + public boolean keepOnCompletion() { + return keepOnCompletion; + } + + public void keepOnCompletion(boolean keepOnCompletion) { + this.keepOnCompletion = keepOnCompletion; + } + + public static EsqlQueryRequest fromXContentSync(XContentParser parser) { + return SYNC_PARSER.apply(parser, null); + } + + public static EsqlQueryRequest fromXContentAsync(XContentParser parser) { + return ASYNC_PARSER.apply(parser, null); + } + + private static void objectParserCommon(ObjectParser parser) { parser.declareString(EsqlQueryRequest::query, QUERY_FIELD); parser.declareBoolean(EsqlQueryRequest::columnar, COLUMNAR_FIELD); parser.declareObject(EsqlQueryRequest::filter, (p, c) -> AbstractQueryBuilder.parseTopLevelQuery(p), FILTER_FIELD); @@ -172,7 +233,30 @@ private static ObjectParser objectParser(Supplier request.locale(Locale.forLanguageTag(localeTag)), LOCALE_FIELD); parser.declareBoolean(EsqlQueryRequest::profile, PROFILE_FIELD); + } + + private static ObjectParser objectParserSync(Supplier supplier) { + ObjectParser parser = new ObjectParser<>("esql/query", false, supplier); + objectParserCommon(parser); + return parser; + } + private static ObjectParser objectParserAsync(Supplier supplier) { + ObjectParser parser = new ObjectParser<>("esql/async_query", false, supplier); + objectParserCommon(parser); + parser.declareBoolean(EsqlQueryRequest::keepOnCompletion, KEEP_ON_COMPLETION); + parser.declareField( + EsqlQueryRequest::waitForCompletionTimeout, + (p, c) -> TimeValue.parseTimeValue(p.text(), WAIT_FOR_COMPLETION_TIMEOUT.getPreferredName()), + WAIT_FOR_COMPLETION_TIMEOUT, + ObjectParser.ValueType.VALUE + ); + parser.declareField( + EsqlQueryRequest::keepAlive, + (p, c) -> TimeValue.parseTimeValue(p.text(), KEEP_ALIVE.getPreferredName()), + KEEP_ALIVE, + ObjectParser.ValueType.VALUE + ); return parser; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java index be3aeec190ded..4746ea81aa0c1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestBuilder.java @@ -9,6 +9,7 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.internal.ElasticsearchClient; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -37,4 +38,24 @@ public EsqlQueryRequestBuilder pragmas(QueryPragmas pragmas) { request.pragmas(pragmas); return this; } + + public EsqlQueryRequestBuilder waitForCompletionTimeout(TimeValue waitForCompletionTimeout) { + request.waitForCompletionTimeout(waitForCompletionTimeout); + return this; + } + + 
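+    // Illustrative note: if these async knobs are left unset, the request keeps
+    // the defaults declared in EsqlQueryRequest: a wait_for_completion_timeout
+    // of one second (DEFAULT_WAIT_FOR_COMPLETION), a keep_alive of five days
+    // (DEFAULT_KEEP_ALIVE), and keep_on_completion disabled.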
public EsqlQueryRequestBuilder keepAlive(TimeValue keepAlive) { + request.keepAlive(keepAlive); + return this; + } + + public EsqlQueryRequestBuilder keepOnCompletion(boolean keepOnCompletion) { + request.keepOnCompletion(keepOnCompletion); + return this; + } + + public EsqlQueryRequestBuilder async(boolean async) { + request.async(async); + return this; + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java index e571713420950..b6473e3bd03ce 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponse.java @@ -7,12 +7,9 @@ package org.elasticsearch.xpack.esql.action; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.TransportVersions; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Iterators; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -20,106 +17,91 @@ import org.elasticsearch.common.xcontent.ChunkedToXContent; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BlockStreamInput; -import org.elasticsearch.compute.data.BooleanBlock; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; -import org.elasticsearch.compute.lucene.UnsupportedValueSource; import org.elasticsearch.compute.operator.DriverProfile; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasable; import org.elasticsearch.core.Releasables; -import org.elasticsearch.search.DocValueFormat; -import org.elasticsearch.xcontent.InstantiatingObjectParser; -import org.elasticsearch.xcontent.ObjectParser; -import org.elasticsearch.xcontent.ParseField; import org.elasticsearch.xcontent.ToXContent; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xcontent.XContentParserConfiguration; -import org.elasticsearch.xcontent.json.JsonXContent; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; -import org.elasticsearch.xpack.esql.planner.PlannerUtils; -import org.elasticsearch.xpack.esql.type.EsqlDataTypes; -import org.elasticsearch.xpack.versionfield.Version; import java.io.IOException; -import java.io.UncheckedIOException; -import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; -import java.util.Map; import java.util.Objects; -import java.util.function.Function; - -import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; -import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER; -import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; -import 
static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; -import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; -import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP; +import java.util.Optional; public class EsqlQueryResponse extends ActionResponse implements ChunkedToXContentObject, Releasable { - private static final InstantiatingObjectParser PARSER; - static { - InstantiatingObjectParser.Builder parser = InstantiatingObjectParser.builder( - "esql/query_response", - true, - EsqlQueryResponse.class - ); - parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfo.fromXContent(p), new ParseField("columns")); - parser.declareField(constructorArg(), (p, c) -> p.list(), new ParseField("values"), ObjectParser.ValueType.OBJECT_ARRAY); - PARSER = parser.build(); - } + + private final AbstractRefCounted counted = AbstractRefCounted.of(this::closeInternal); private final List columns; private final List pages; private final Profile profile; private final boolean columnar; - - public EsqlQueryResponse(List columns, List pages, @Nullable Profile profile, boolean columnar) { + private final String asyncExecutionId; + private final boolean isRunning; + // True if this response is as a result of an async query request + private final boolean isAsync; + + public EsqlQueryResponse( + List columns, + List pages, + @Nullable Profile profile, + boolean columnar, + @Nullable String asyncExecutionId, + boolean isRunning, + boolean isAsync + ) { this.columns = columns; this.pages = pages; this.profile = profile; this.columnar = columnar; + this.asyncExecutionId = asyncExecutionId; + this.isRunning = isRunning; + this.isAsync = isAsync; } - public EsqlQueryResponse(List columns, List> values) { - this.columns = columns; - this.pages = List.of(valuesToPage(columns.stream().map(ColumnInfo::type).toList(), values)); - this.profile = null; - this.columnar = false; + public EsqlQueryResponse(List columns, List pages, @Nullable Profile profile, boolean columnar, boolean isAsync) { + this(columns, pages, profile, columnar, null, false, isAsync); } /** * Build a reader for the response. 
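+     * The given {@link BlockFactory} is passed to the {@code BlockStreamInput}
+     * used to deserialize the response's pages.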
 */
    public static Writeable.Reader<EsqlQueryResponse> reader(BlockFactory blockFactory) {
-        return in -> new EsqlQueryResponse(new BlockStreamInput(in, blockFactory));
+        return in -> deserialize(new BlockStreamInput(in, blockFactory));
    }

-    private EsqlQueryResponse(BlockStreamInput in) throws IOException {
-        super(in);
-        this.columns = in.readCollectionAsList(ColumnInfo::new);
-        this.pages = in.readCollectionAsList(Page::new);
+    static EsqlQueryResponse deserialize(BlockStreamInput in) throws IOException {
+        String asyncExecutionId = null;
+        boolean isRunning = false;
+        boolean isAsync = false;
+        Profile profile = null;
+        if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) {
+            asyncExecutionId = in.readOptionalString();
+            isRunning = in.readBoolean();
+            isAsync = in.readBoolean();
+        }
+        List<ColumnInfo> columns = in.readCollectionAsList(ColumnInfo::new);
+        List<Page> pages = in.readCollectionAsList(Page::new);
         if (in.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) {
-            this.profile = in.readOptionalWriteable(Profile::new);
-        } else {
-            this.profile = null;
+            profile = in.readOptionalWriteable(Profile::new);
         }
-        this.columnar = in.readBoolean();
+        boolean columnar = in.readBoolean();
+        return new EsqlQueryResponse(columns, pages, profile, columnar, asyncExecutionId, isRunning, isAsync);
     }

     @Override
     public void writeTo(StreamOutput out) throws IOException {
+        if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_ASYNC_QUERY)) {
+            out.writeOptionalString(asyncExecutionId);
+            out.writeBoolean(isRunning);
+            out.writeBoolean(isAsync);
+        }
         out.writeCollection(columns);
         out.writeCollection(pages);
         if (out.getTransportVersion().onOrAfter(TransportVersions.ESQL_PROFILE)) {
@@ -137,7 +119,8 @@ List<Page> pages() {
     }

     public Iterator<Iterator<Object>> values() {
-        return pagesToValues(columns.stream().map(ColumnInfo::type).toList(), pages);
+        List<String> dataTypes = columns.stream().map(ColumnInfo::type).toList();
+        return ResponseValueUtils.pagesToValues(dataTypes, pages);
     }

     public Profile profile() {
@@ -148,63 +131,42 @@ public boolean columnar() {
         return columnar;
     }

-    @Override
-    public Iterator<? extends ToXContent> toXContentChunked(ToXContent.Params params) {
-        final BytesRef scratch = new BytesRef();
-        final Iterator<? extends ToXContent> valuesIt;
-        if (pages.isEmpty()) {
-            valuesIt = Collections.emptyIterator();
-        } else if (columnar) {
-            valuesIt = Iterators.flatMap(
-                Iterators.forRange(
-                    0,
-                    columns().size(),
-                    column -> Iterators.concat(
-                        Iterators.single(((builder, p) -> builder.startArray())),
-                        Iterators.flatMap(pages.iterator(), page -> {
-                            ColumnInfo.PositionToXContent toXContent = columns.get(column)
-                                .positionToXContent(page.getBlock(column), scratch);
-                            return Iterators.forRange(
-                                0,
-                                page.getPositionCount(),
-                                position -> (builder, p) -> toXContent.positionToXContent(builder, p, position)
-                            );
-                        }),
-                        ChunkedToXContentHelper.endArray()
-                    )
-                ),
-                Function.identity()
-            );
-        } else {
-            valuesIt = Iterators.flatMap(pages.iterator(), page -> {
-                final int columnCount = columns.size();
-                assert page.getBlockCount() == columnCount : page.getBlockCount() + " != " + columnCount;
-                final ColumnInfo.PositionToXContent[] toXContents = new ColumnInfo.PositionToXContent[columnCount];
-                for (int column = 0; column < columnCount; column++) {
-                    toXContents[column] = columns.get(column).positionToXContent(page.getBlock(column), scratch);
+    public Optional<String> asyncExecutionId() {
+        return Optional.ofNullable(asyncExecutionId);
+    }
+
+    public boolean isRunning() {
+        return isRunning;
+    }
+
+    public boolean isAsync() {
+        return isAsync;
+    }
+
+    private
Iterator asyncPropertiesOrEmpty() { + if (isAsync) { + return ChunkedToXContentHelper.singleChunk((builder, params) -> { + if (asyncExecutionId != null) { + builder.field("id", asyncExecutionId); } - return Iterators.forRange(0, page.getPositionCount(), position -> (builder, p) -> { - builder.startArray(); - for (int c = 0; c < columnCount; c++) { - toXContents[c].positionToXContent(builder, p, position); - } - return builder.endArray(); - }); + builder.field("is_running", isRunning); + return builder; }); + } else { + return Collections.emptyIterator(); } - Iterator columnsRender = ChunkedToXContentHelper.singleChunk((builder, p) -> { - builder.startArray("columns"); - for (ColumnInfo col : columns) { - col.toXContent(builder, p); - } - return builder.endArray(); - }); + } + + @Override + public Iterator toXContentChunked(ToXContent.Params params) { + final Iterator valuesIt = ResponseXContentUtils.columnValues(this.columns, this.pages, columnar); Iterator profileRender = profile == null ? List.of().iterator() : ChunkedToXContentHelper.field("profile", profile, params); return Iterators.concat( ChunkedToXContentHelper.startObject(), - columnsRender, + asyncPropertiesOrEmpty(), + ResponseXContentUtils.columnHeadings(columns), ChunkedToXContentHelper.array("values", valuesIt), profileRender, ChunkedToXContentHelper.endObject() @@ -216,16 +178,14 @@ public boolean isFragment() { return false; } - public static EsqlQueryResponse fromXContent(XContentParser parser) { - return PARSER.apply(parser, null); - } - @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; EsqlQueryResponse that = (EsqlQueryResponse) o; return Objects.equals(columns, that.columns) + && Objects.equals(asyncExecutionId, that.asyncExecutionId) + && Objects.equals(isRunning, that.isRunning) && columnar == that.columnar && Iterators.equals(values(), that.values(), (row1, row2) -> Iterators.equals(row1, row2, Objects::equals)) && Objects.equals(profile, that.profile); @@ -233,7 +193,13 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(columns, Iterators.hashCode(values(), row -> Iterators.hashCode(row, Objects::hashCode)), columnar); + return Objects.hash( + asyncExecutionId, + isRunning, + columns, + Iterators.hashCode(values(), row -> Iterators.hashCode(row, Objects::hashCode)), + columnar + ); } @Override @@ -242,129 +208,32 @@ public String toString() { } @Override - public void close() { - Releasables.close(() -> Iterators.map(pages.iterator(), p -> p::releaseBlocks)); + public void incRef() { + tryIncRef(); } - public static Iterator> pagesToValues(List dataTypes, List pages) { - BytesRef scratch = new BytesRef(); - return Iterators.flatMap( - pages.iterator(), - page -> Iterators.forRange(0, page.getPositionCount(), p -> Iterators.forRange(0, page.getBlockCount(), b -> { - Block block = page.getBlock(b); - if (block.isNull(p)) { - return null; - } - /* - * Use the ESQL data type to map to the output to make sure compute engine - * respects its types. See the INTEGER clause where is doesn't always - * respect it. 
- */ - int count = block.getValueCount(p); - int start = block.getFirstValueIndex(p); - String dataType = dataTypes.get(b); - if (count == 1) { - return valueAt(dataType, block, start, scratch); - } - List thisResult = new ArrayList<>(count); - int end = count + start; - for (int i = start; i < end; i++) { - thisResult.add(valueAt(dataType, block, i, scratch)); - } - return thisResult; - })) - ); + @Override + public boolean tryIncRef() { + return counted.tryIncRef(); } - private static Object valueAt(String dataType, Block block, int offset, BytesRef scratch) { - return switch (dataType) { - case "unsigned_long" -> unsignedLongAsNumber(((LongBlock) block).getLong(offset)); - case "long" -> ((LongBlock) block).getLong(offset); - case "integer" -> ((IntBlock) block).getInt(offset); - case "double" -> ((DoubleBlock) block).getDouble(offset); - case "keyword", "text" -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); - case "ip" -> { - BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); - yield DocValueFormat.IP.format(val); - } - case "date" -> { - long longVal = ((LongBlock) block).getLong(offset); - yield UTC_DATE_TIME_FORMATTER.formatMillis(longVal); - } - case "boolean" -> ((BooleanBlock) block).getBoolean(offset); - case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); - case "geo_point" -> GEO.longAsPoint(((LongBlock) block).getLong(offset)); - case "cartesian_point" -> CARTESIAN.longAsPoint(((LongBlock) block).getLong(offset)); - case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; - case "_source" -> { - BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); - try { - try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(val))) { - parser.nextToken(); - yield parser.mapOrdered(); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - default -> throw EsqlIllegalArgumentException.illegalDataType(dataType); - }; + @Override + public boolean decRef() { + return counted.decRef(); } - /** - * Convert a list of values to Pages so we can parse from xcontent. It's not - * super efficient but it doesn't really have to be. 
- */ - private static Page valuesToPage(List dataTypes, List> values) { - List results = dataTypes.stream() - .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size())) - .toList(); - - for (List row : values) { - for (int c = 0; c < row.size(); c++) { - var builder = results.get(c); - var value = row.get(c); - switch (dataTypes.get(c)) { - case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong(asLongUnsigned(((Number) value).longValue())); - case "long" -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue()); - case "integer" -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue()); - case "double" -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue()); - case "keyword", "text", "unsupported" -> ((BytesRefBlock.Builder) builder).appendBytesRef( - new BytesRef(value.toString()) - ); - case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(parseIP(value.toString())); - case "date" -> { - long longVal = UTC_DATE_TIME_FORMATTER.parseMillis(value.toString()); - ((LongBlock.Builder) builder).appendLong(longVal); - } - case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value)); - case "null" -> builder.appendNull(); - case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(value.toString()).toBytesRef()); - case "_source" -> { - @SuppressWarnings("unchecked") - Map o = (Map) value; - try { - try (XContentBuilder sourceBuilder = JsonXContent.contentBuilder()) { - sourceBuilder.map(o); - ((BytesRefBlock.Builder) builder).appendBytesRef(BytesReference.bytes(sourceBuilder).toBytesRef()); - } - } catch (IOException e) { - throw new UncheckedIOException(e); - } - } - case "geo_point" -> { - long longVal = GEO.pointAsLong(GEO.stringAsPoint(value.toString())); - ((LongBlock.Builder) builder).appendLong(longVal); - } - case "cartesian_point" -> { - long longVal = CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(value.toString())); - ((LongBlock.Builder) builder).appendLong(longVal); - } - default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c)); - } - } - } - return new Page(results.stream().map(Block.Builder::build).toArray(Block[]::new)); + @Override + public boolean hasReferences() { + return counted.hasReferences(); + } + + @Override + public void close() { + decRef(); + } + + void closeInternal() { + Releasables.close(() -> Iterators.map(pages.iterator(), p -> p::releaseBlocks)); } public static class Profile implements Writeable, ChunkedToXContentObject { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java new file mode 100644 index 0000000000000..917355b2d88b5 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlQueryTask.java @@ -0,0 +1,38 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.action; + +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.tasks.TaskId; +import org.elasticsearch.xpack.core.async.AsyncExecutionId; +import org.elasticsearch.xpack.core.async.StoredAsyncTask; + +import java.util.List; +import java.util.Map; + +public class EsqlQueryTask extends StoredAsyncTask { + + public EsqlQueryTask( + long id, + String type, + String action, + String description, + TaskId parentTaskId, + Map headers, + Map originHeaders, + AsyncExecutionId asyncExecutionId, + TimeValue keepAlive + ) { + super(id, type, action, description, parentTaskId, headers, originHeaders, asyncExecutionId, keepAlive); + } + + @Override + public EsqlQueryResponse getCurrentResult() { + return new EsqlQueryResponse(List.of(), List.of(), null, false, getExecutionId().getEncoded(), true, true); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java index ee641cd9209a7..7b525642009a7 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlResponseListener.java @@ -8,6 +8,7 @@ package org.elasticsearch.xpack.esql.action; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.core.Releasable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; @@ -16,11 +17,12 @@ import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.RestStatus; -import org.elasticsearch.rest.action.RestResponseListener; +import org.elasticsearch.rest.action.RestRefCountedChunkedToXContentListener; import org.elasticsearch.xcontent.MediaType; import org.elasticsearch.xpack.esql.formatter.TextFormat; import org.elasticsearch.xpack.esql.plugin.EsqlMediaTypeParser; +import java.io.IOException; import java.util.Locale; import java.util.concurrent.TimeUnit; @@ -31,7 +33,7 @@ /** * Listens for a single {@link EsqlQueryResponse}, builds a corresponding {@link RestResponse} and sends it. */ -public class EsqlResponseListener extends RestResponseListener { +public final class EsqlResponseListener extends RestRefCountedChunkedToXContentListener { /** * A simple, thread-safe stop watch for timing a single action. * Allows to stop the time for building a response and to log it at a later point. 
@@ -118,8 +120,13 @@ public EsqlResponseListener(RestChannel channel, RestRequest restRequest, EsqlQu } @Override - public RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws Exception { + protected void processResponse(EsqlQueryResponse esqlQueryResponse) throws IOException { + channel.sendResponse(buildResponse(esqlQueryResponse)); + } + + private RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws IOException { boolean success = false; + final Releasable releasable = releasableFromResponse(esqlResponse); try { RestResponse restResponse; if (mediaType instanceof TextFormat format) { @@ -128,13 +135,13 @@ public RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws Excepti ChunkedRestResponseBody.fromTextChunks( format.contentType(restRequest), format.format(restRequest, esqlResponse), - esqlResponse + releasable ) ); } else { restResponse = RestResponse.chunked( RestStatus.OK, - ChunkedRestResponseBody.fromXContent(esqlResponse, channel.request(), channel, esqlResponse) + ChunkedRestResponseBody.fromXContent(esqlResponse, channel.request(), channel, releasable) ); } long tookNanos = stopWatch.stop().getNanos(); @@ -143,7 +150,7 @@ public RestResponse buildResponse(EsqlQueryResponse esqlResponse) throws Excepti return restResponse; } finally { if (success == false) { - esqlResponse.close(); + releasable.close(); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java new file mode 100644 index 0000000000000..8a4efa1d16a69 --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseValueUtils.java @@ -0,0 +1,178 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.bytes.BytesArray;
+import org.elasticsearch.common.bytes.BytesReference;
+import org.elasticsearch.common.collect.Iterators;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.compute.data.Block;
+import org.elasticsearch.compute.data.BlockFactory;
+import org.elasticsearch.compute.data.BooleanBlock;
+import org.elasticsearch.compute.data.BytesRefBlock;
+import org.elasticsearch.compute.data.DoubleBlock;
+import org.elasticsearch.compute.data.IntBlock;
+import org.elasticsearch.compute.data.LongBlock;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.compute.lucene.UnsupportedValueSource;
+import org.elasticsearch.search.DocValueFormat;
+import org.elasticsearch.xcontent.XContentBuilder;
+import org.elasticsearch.xcontent.XContentParser;
+import org.elasticsearch.xcontent.XContentParserConfiguration;
+import org.elasticsearch.xcontent.json.JsonXContent;
+import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException;
+import org.elasticsearch.xpack.esql.planner.PlannerUtils;
+import org.elasticsearch.xpack.esql.type.EsqlDataTypes;
+import org.elasticsearch.xpack.versionfield.Version;
+
+import java.io.IOException;
+import java.io.UncheckedIOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import static org.elasticsearch.xpack.ql.util.DateUtils.UTC_DATE_TIME_FORMATTER;
+import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned;
+import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsNumber;
+import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN;
+import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO;
+import static org.elasticsearch.xpack.ql.util.StringUtils.parseIP;
+
+/**
+ * Collection of static utility methods for helping transform response data between pages and values.
+ */
+public final class ResponseValueUtils {
+
+    /**
+     * Returns an iterator of iterators over the values in the given pages. There is one iterator
+     * for each position (row), iterating over the values of that row across blocks.
+     */
+    public static Iterator<Iterator<Object>> pagesToValues(List<String> dataTypes, List<Page> pages) {
+        BytesRef scratch = new BytesRef();
+        return Iterators.flatMap(
+            pages.iterator(),
+            page -> Iterators.forRange(0, page.getPositionCount(), p -> Iterators.forRange(0, page.getBlockCount(), b -> {
+                Block block = page.getBlock(b);
+                if (block.isNull(p)) {
+                    return null;
+                }
+                /*
+                 * Use the ESQL data type to map to the output to make sure compute engine
+                 * respects its types. See the INTEGER clause where it doesn't always
+                 * respect it.
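+                 * For example, an unsigned_long value is widened through
+                 * unsignedLongAsNumber(...) below rather than returned as the
+                 * raw long that backs the block.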
+ */ + int count = block.getValueCount(p); + int start = block.getFirstValueIndex(p); + String dataType = dataTypes.get(b); + if (count == 1) { + return valueAt(dataType, block, start, scratch); + } + List thisResult = new ArrayList<>(count); + int end = count + start; + for (int i = start; i < end; i++) { + thisResult.add(valueAt(dataType, block, i, scratch)); + } + return thisResult; + })) + ); + } + + private static Object valueAt(String dataType, Block block, int offset, BytesRef scratch) { + return switch (dataType) { + case "unsigned_long" -> unsignedLongAsNumber(((LongBlock) block).getLong(offset)); + case "long" -> ((LongBlock) block).getLong(offset); + case "integer" -> ((IntBlock) block).getInt(offset); + case "double" -> ((DoubleBlock) block).getDouble(offset); + case "keyword", "text" -> ((BytesRefBlock) block).getBytesRef(offset, scratch).utf8ToString(); + case "ip" -> { + BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); + yield DocValueFormat.IP.format(val); + } + case "date" -> { + long longVal = ((LongBlock) block).getLong(offset); + yield UTC_DATE_TIME_FORMATTER.formatMillis(longVal); + } + case "boolean" -> ((BooleanBlock) block).getBoolean(offset); + case "version" -> new Version(((BytesRefBlock) block).getBytesRef(offset, scratch)).toString(); + case "geo_point" -> GEO.longAsPoint(((LongBlock) block).getLong(offset)); + case "cartesian_point" -> CARTESIAN.longAsPoint(((LongBlock) block).getLong(offset)); + case "unsupported" -> UnsupportedValueSource.UNSUPPORTED_OUTPUT; + case "_source" -> { + BytesRef val = ((BytesRefBlock) block).getBytesRef(offset, scratch); + try { + try (XContentParser parser = XContentHelper.createParser(XContentParserConfiguration.EMPTY, new BytesArray(val))) { + parser.nextToken(); + yield parser.mapOrdered(); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + default -> throw EsqlIllegalArgumentException.illegalDataType(dataType); + }; + } + + /** + * Converts a list of values to Pages so that we can parse from xcontent. It's not + * super efficient, but it doesn't really have to be. 
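+     * Roughly the inverse of {@link #pagesToValues}: values that were rendered
+     * out as xcontent are turned back into blocks.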
+     */
+    static Page valuesToPage(BlockFactory blockFactory, List<ColumnInfo> columns, List<List<Object>> values) {
+        List<String> dataTypes = columns.stream().map(ColumnInfo::type).toList();
+        List<Block.Builder> results = dataTypes.stream()
+            .map(c -> PlannerUtils.toElementType(EsqlDataTypes.fromName(c)).newBlockBuilder(values.size(), blockFactory))
+            .toList();
+
+        for (List<Object> row : values) {
+            for (int c = 0; c < row.size(); c++) {
+                var builder = results.get(c);
+                var value = row.get(c);
+                switch (dataTypes.get(c)) {
+                    case "unsigned_long" -> ((LongBlock.Builder) builder).appendLong(asLongUnsigned(((Number) value).longValue()));
+                    case "long" -> ((LongBlock.Builder) builder).appendLong(((Number) value).longValue());
+                    case "integer" -> ((IntBlock.Builder) builder).appendInt(((Number) value).intValue());
+                    case "double" -> ((DoubleBlock.Builder) builder).appendDouble(((Number) value).doubleValue());
+                    case "keyword", "text", "unsupported" -> ((BytesRefBlock.Builder) builder).appendBytesRef(
+                        new BytesRef(value.toString())
+                    );
+                    case "ip" -> ((BytesRefBlock.Builder) builder).appendBytesRef(parseIP(value.toString()));
+                    case "date" -> {
+                        long longVal = UTC_DATE_TIME_FORMATTER.parseMillis(value.toString());
+                        ((LongBlock.Builder) builder).appendLong(longVal);
+                    }
+                    case "boolean" -> ((BooleanBlock.Builder) builder).appendBoolean(((Boolean) value));
+                    case "null" -> builder.appendNull();
+                    case "version" -> ((BytesRefBlock.Builder) builder).appendBytesRef(new Version(value.toString()).toBytesRef());
+                    case "_source" -> {
+                        @SuppressWarnings("unchecked")
+                        Map<String, Object> o = (Map<String, Object>) value;
+                        try {
+                            try (XContentBuilder sourceBuilder = JsonXContent.contentBuilder()) {
+                                sourceBuilder.map(o);
+                                ((BytesRefBlock.Builder) builder).appendBytesRef(BytesReference.bytes(sourceBuilder).toBytesRef());
+                            }
+                        } catch (IOException e) {
+                            throw new UncheckedIOException(e);
+                        }
+                    }
+                    case "geo_point" -> {
+                        long longVal = GEO.pointAsLong(GEO.stringAsPoint(value.toString()));
+                        ((LongBlock.Builder) builder).appendLong(longVal);
+                    }
+                    case "cartesian_point" -> {
+                        long longVal = CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(value.toString()));
+                        ((LongBlock.Builder) builder).appendLong(longVal);
+                    }
+                    default -> throw EsqlIllegalArgumentException.illegalDataType(dataTypes.get(c));
+                }
+            }
+        }
+        return new Page(results.stream().map(Block.Builder::build).toArray(Block[]::new));
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java
new file mode 100644
index 0000000000000..e28e6beebabed
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/ResponseXContentUtils.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.apache.lucene.util.BytesRef;
+import org.elasticsearch.common.collect.Iterators;
+import org.elasticsearch.common.xcontent.ChunkedToXContentHelper;
+import org.elasticsearch.compute.data.Page;
+import org.elasticsearch.xcontent.ToXContent;
+
+import java.util.Collections;
+import java.util.Iterator;
+import java.util.List;
+import java.util.function.Function;
+
+/**
+ * Collection of static utility methods for helping transform response data to XContent.
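+ *
+ * <p>Illustrative shapes: for two rows {@code (1, "a")} and {@code (2, "b")}, the
+ * row-oriented form below renders as {@code [[1, "a"], [2, "b"]]}, while the columnar
+ * form renders as {@code [[1, 2], ["a", "b"]]}.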
+ */
+final class ResponseXContentUtils {
+
+    /** Returns the column headings for the given columns. */
+    static Iterator<? extends ToXContent> columnHeadings(List<ColumnInfo> columns) {
+        return ChunkedToXContentHelper.singleChunk((builder, params) -> {
+            builder.startArray("columns");
+            for (ColumnInfo col : columns) {
+                col.toXContent(builder, params);
+            }
+            return builder.endArray();
+        });
+    }
+
+    /** Returns the column values for the given pages (described by the column infos). */
+    static Iterator<? extends ToXContent> columnValues(List<ColumnInfo> columns, List<Page> pages, boolean columnar) {
+        if (pages.isEmpty()) {
+            return Collections.emptyIterator();
+        } else if (columnar) {
+            return columnarValues(columns, pages);
+        } else {
+            return rowValues(columns, pages);
+        }
+    }
+
+    /** Returns a columnar-based representation of the values in the given pages (described by the column infos). */
+    static Iterator<? extends ToXContent> columnarValues(List<ColumnInfo> columns, List<Page> pages) {
+        final BytesRef scratch = new BytesRef();
+        return Iterators.flatMap(
+            Iterators.forRange(
+                0,
+                columns.size(),
+                column -> Iterators.concat(
+                    Iterators.single(((builder, params) -> builder.startArray())),
+                    Iterators.flatMap(pages.iterator(), page -> {
+                        ColumnInfo.PositionToXContent toXContent = columns.get(column).positionToXContent(page.getBlock(column), scratch);
+                        return Iterators.forRange(
+                            0,
+                            page.getPositionCount(),
+                            position -> (builder, params) -> toXContent.positionToXContent(builder, params, position)
+                        );
+                    }),
+                    ChunkedToXContentHelper.endArray()
+                )
+            ),
+            Function.identity()
+        );
+    }
+
+    /** Returns a row-based representation of the values in the given pages (described by the column infos). */
+    static Iterator<? extends ToXContent> rowValues(List<ColumnInfo> columns, List<Page> pages) {
+        final BytesRef scratch = new BytesRef();
+        return Iterators.flatMap(pages.iterator(), page -> {
+            final int columnCount = columns.size();
+            assert page.getBlockCount() == columnCount : page.getBlockCount() + " != " + columnCount;
+            final ColumnInfo.PositionToXContent[] toXContents = new ColumnInfo.PositionToXContent[columnCount];
+            for (int column = 0; column < columnCount; column++) {
+                toXContents[column] = columns.get(column).positionToXContent(page.getBlock(column), scratch);
+            }
+            return Iterators.forRange(0, page.getPositionCount(), position -> (builder, params) -> {
+                builder.startArray();
+                for (int c = 0; c < columnCount; c++) {
+                    toXContents[c].positionToXContent(builder, params, position);
+                }
+                return builder.endArray();
+            });
+        });
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java
new file mode 100644
index 0000000000000..04b37616b3ebf
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlAsyncQueryAction.java
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.logging.LogManager;
+import org.elasticsearch.logging.Logger;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestCancellableNodeClient;
+import org.elasticsearch.xcontent.XContentParser;
+
+import java.io.IOException;
+import java.util.Collections;
+import java.util.List;
+import java.util.Set;
+
+import static org.elasticsearch.rest.RestRequest.Method.POST;
+import static org.elasticsearch.xpack.esql.formatter.TextFormat.URL_PARAM_DELIMITER;
+
+public class RestEsqlAsyncQueryAction extends BaseRestHandler {
+    private static final Logger LOGGER = LogManager.getLogger(RestEsqlAsyncQueryAction.class);
+
+    @Override
+    public String getName() {
+        return "esql_async_query";
+    }
+
+    @Override
+    public List<Route> routes() {
+        return List.of(new Route(POST, "/_query/async"));
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
+        EsqlQueryRequest esqlRequest;
+        try (XContentParser parser = request.contentOrSourceParamParser()) {
+            esqlRequest = EsqlQueryRequest.fromXContentAsync(parser);
+        }
+
+        LOGGER.info("Beginning execution of ESQL async query.\nQuery string: [{}]", esqlRequest.query());
+
+        return channel -> {
+            RestCancellableNodeClient cancellableClient = new RestCancellableNodeClient(client, request.getHttpChannel());
+            cancellableClient.execute(
+                EsqlQueryAction.INSTANCE,
+                esqlRequest,
+                new EsqlResponseListener(channel, request, esqlRequest).wrapWithLogging()
+            );
+        };
+    }
+
+    @Override
+    protected Set<String> responseParams() {
+        return Collections.singleton(URL_PARAM_DELIMITER);
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java
new file mode 100644
index 0000000000000..1857e32e99b06
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlDeleteAsyncResultAction.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestToXContentListener;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultAction;
+import org.elasticsearch.xpack.core.async.DeleteAsyncResultRequest;
+
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.DELETE;
+
+public class RestEsqlDeleteAsyncResultAction extends BaseRestHandler {
+    @Override
+    public List<Route> routes() {
+        return List.of(new RestHandler.Route(DELETE, "/_query/async/{id}"));
+    }
+
+    @Override
+    public String getName() {
+        return "esql_delete_async_result";
+    }
+
+    @Override
+    protected BaseRestHandler.RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
+        DeleteAsyncResultRequest delete = new DeleteAsyncResultRequest(request.param("id"));
+        return channel -> client.execute(DeleteAsyncResultAction.INSTANCE, delete, new RestToXContentListener<>(channel));
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java
new file mode 100644
index 0000000000000..05da87017e755
--- /dev/null
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlGetAsyncResultAction.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.esql.action;
+
+import org.elasticsearch.client.internal.node.NodeClient;
+import org.elasticsearch.rest.BaseRestHandler;
+import org.elasticsearch.rest.RestRequest;
+import org.elasticsearch.rest.action.RestChunkedToXContentListener;
+import org.elasticsearch.xpack.core.async.GetAsyncResultRequest;
+
+import java.util.List;
+
+import static org.elasticsearch.rest.RestRequest.Method.GET;
+
+public class RestEsqlGetAsyncResultAction extends BaseRestHandler {
+    @Override
+    public List<Route> routes() {
+        return List.of(new Route(GET, "/_query/async/{id}"));
+    }
+
+    @Override
+    public String getName() {
+        return "esql_get_async_result";
+    }
+
+    @Override
+    protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) {
+        GetAsyncResultRequest get = new GetAsyncResultRequest(request.param("id"));
+        if (request.hasParam("wait_for_completion_timeout")) {
+            get.setWaitForCompletionTimeout(request.paramAsTime("wait_for_completion_timeout", get.getWaitForCompletionTimeout()));
+        }
+        if (request.hasParam("keep_alive")) {
+            get.setKeepAlive(request.paramAsTime("keep_alive", get.getKeepAlive()));
+        }
+        return channel -> client.execute(EsqlAsyncGetResultAction.INSTANCE, get, new RestChunkedToXContentListener<>(channel));
+    }
+}
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
index 7a1b7f7b9b927..6b8e7fc397865 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/RestEsqlQueryAction.java
@@ -48,7 +48,7 @@ public List<Route> routes() {
     protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException {
         EsqlQueryRequest esqlRequest;
         try (XContentParser parser = request.contentOrSourceParamParser()) {
-            esqlRequest = EsqlQueryRequest.fromXContent(parser);
+            esqlRequest = EsqlQueryRequest.fromXContentSync(parser);
         }
 
         LOGGER.info("Beginning execution of ESQL query.\nQuery string: [{}]", esqlRequest.query());
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
index 6959c04345d31..674a32db1f0fb 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/analysis/Analyzer.java
@@ -68,6 +68,7 @@
 import java.util.Comparator;
 import java.util.HashMap;
 import java.util.HashSet;
+import java.util.LinkedHashMap;
 import java.util.LinkedHashSet;
 import java.util.List;
 import java.util.Map;
@@ -416,6 +417,40 @@ private LogicalPlan resolveEval(Eval eval, List<Attribute> childOutput) {
         return changed ? new Eval(eval.source(), eval.child(), newFields) : eval;
     }
 
+    /**
+     * Resolves each item manually.
+     *
+     * Fields are added in the order they appear.
+     *
+     * If one field matches multiple expressions, the following precedence rules apply (higher to lower):
+     * 1. complete field name (i.e. no wildcards)
+     * 2. partial wildcard expressions (e.g. fieldNam*)
+     * 3. wildcard only (i.e. *)
+     *
+     * If a field name matches multiple expressions with the same precedence, the last one is used.
+     *
+     * A few examples below:
+     *
+     * // full name
+     * row foo = 1, bar = 2 | keep foo, bar, foo -> bar, foo
+     *
+     * // the full name has precedence over the wildcard expression
+     * row foo = 1, bar = 2 | keep foo, bar, foo* -> foo, bar
+     *
+     * // the two wildcard expressions have the same priority, even though the first one is more specific,
+     * // so the last one wins
+     * row foo = 1, bar = 2 | keep foo*, bar, fo* -> bar, foo
+     *
+     * // * has the lowest priority
+     * row foo = 1, bar = 2 | keep *, foo -> bar, foo
+     * row foo = 1, bar = 2 | keep foo, * -> foo, bar
+     * row foo = 1, bar = 2 | keep bar*, foo, * -> bar, foo
+     *
+     * @param p the KEEP projection to resolve
+     * @param childOutput the output attributes of the child plan
+     * @return the projection with all of its items resolved
+     */
     private LogicalPlan resolveKeep(Project p, List<Attribute> childOutput) {
         List<NamedExpression> resolvedProjections = new ArrayList<>();
         var projections = p.projections();
@@ -427,26 +462,31 @@ private LogicalPlan resolveKeep(Project p, List<Attribute> childOutput) {
         }
         // otherwise resolve them
         else {
-            var starPosition = -1; // no star
-            // resolve each item manually while paying attention to:
-            // 1. name patterns a*, *b, a*b
-            // 2. star * - which can only appear once and signifies "everything else" - this will be added at the end
-            for (var ne : projections) {
-                if (ne instanceof UnresolvedStar) {
-                    starPosition = resolvedProjections.size();
-                } else if (ne instanceof UnresolvedAttribute ua) {
-                    resolvedProjections.addAll(resolveAgainstList(ua, childOutput));
-                } else {
-                    // if this gets here it means it was already resolved
-                    resolvedProjections.add(ne);
+            Map<NamedExpression, Integer> priorities = new LinkedHashMap<>();
+            for (Attribute attribute : childOutput) {
+                for (var proj : projections) {
+                    List<Attribute> resolved;
+                    int priority;
+                    if (proj instanceof UnresolvedStar) {
+                        resolved = childOutput;
+                        priority = 2;
+                    } else if (proj instanceof UnresolvedAttribute ua) {
+                        resolved = resolveAgainstList(ua, childOutput);
+                        priority = Regex.isSimpleMatchPattern(ua.name()) ? 1 : 0;
+                    } else {
+                        resolved = List.of(attribute);
+                        priority = 0;
+                    }
+                    for (Attribute attr : resolved) {
+                        Integer previousPrio = priorities.get(attr);
+                        if (previousPrio == null || previousPrio >= priority) {
+                            priorities.remove(attr);
+                            priorities.put(attr, priority);
+                        }
+                    }
+                }
             }
-            // compute star if specified and add it to the list
-            if (starPosition >= 0) {
-                var remainingProjections = new ArrayList<>(childOutput);
-                remainingProjections.removeAll(resolvedProjections);
-                resolvedProjections.addAll(starPosition, remainingProjections);
-            }
+            resolvedProjections = new ArrayList<>(priorities.keySet());
         }
 
         return new EsqlProject(p.source(), p.child(), resolvedProjections);
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
index 945f543329c15..6d57b239e94a0 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichLookupService.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.action.ActionListenerResponseHandler;
 import org.elasticsearch.action.IndicesRequest;
 import org.elasticsearch.action.UnavailableShardsException;
+import org.elasticsearch.action.support.ChannelActionListener;
 import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.action.support.IndicesOptions;
 import org.elasticsearch.cluster.ClusterState;
@@ -24,7 +25,6 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.compute.OwningChannelActionListener;
 import org.elasticsearch.compute.data.Block;
 import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BlockStreamInput;
@@ -369,7 +369,7 @@ private class TransportHandler implements TransportRequestHandler<LookupRequest>
     @Override
     public void messageReceived(LookupRequest request, TransportChannel channel, Task task) {
         request.incRef();
-        ActionListener<LookupResponse> listener = ActionListener.runBefore(new OwningChannelActionListener<>(channel), request::decRef);
+        ActionListener<LookupResponse> listener = ActionListener.runBefore(new ChannelActionListener<>(channel), request::decRef);
         doLookup(
             request.sessionId,
             (CancellableTask) task,
@@ -378,7 +378,7 @@ public void messageReceived(LookupRequest request, TransportChannel channel, Tas
             request.matchField,
             request.inputPage,
             request.extractFields,
-            listener.map(LookupResponse::new)
+            listener.delegateFailureAndWrap((l, outPage) -> ActionListener.respondAndRelease(l, new LookupResponse(outPage)))
         );
     }
 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java
index 246849896bcdf..1e21886a7ac4b 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/enrich/EnrichPolicyResolver.java
@@ -9,12 +9,12 @@
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.ActionListenerResponseHandler;
+import org.elasticsearch.action.support.ChannelActionListener;
 import org.elasticsearch.action.support.ContextPreservingActionListener;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
-import org.elasticsearch.compute.OwningChannelActionListener;
 import org.elasticsearch.tasks.Task;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportChannel;
@@ -110,7 +110,7 @@ public void messageReceived(ResolveRequest request, TransportChannel channel, Ta
         String policyName = request.policyName;
         EnrichPolicy policy = policies().get(policyName);
         ThreadContext threadContext = threadPool.getThreadContext();
-        ActionListener<ResolveResponse> listener = new OwningChannelActionListener<>(channel);
+        ActionListener<ResolveResponse> listener = new ChannelActionListener<>(channel);
         listener = ContextPreservingActionListener.wrapPreservingContext(listener, threadContext);
         try (ThreadContext.StoredContext ignored = threadContext.stashWithOrigin(ClientHelper.ENRICH_ORIGIN)) {
             indexResolver.resolveAsMergedMapping(
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
index 280ef898c3b90..1705a5b7bd215 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/EvalMapper.java
@@ -132,7 +132,7 @@ private Block eval(Block lhs, Block rhs) {
 
         private Block eval(BooleanVector lhs, BooleanVector rhs) {
             int positionCount = lhs.getPositionCount();
-            try (var result = BooleanVector.newVectorFixedBuilder(positionCount, lhs.blockFactory())) {
+            try (var result = lhs.blockFactory().newBooleanVectorFixedBuilder(positionCount)) {
                 for (int p = 0; p < positionCount; p++) {
                     result.appendBoolean(bl.function().apply(lhs.getBoolean(p), rhs.getBoolean(p)));
                 }
@@ -269,12 +269,7 @@ public Block eval(Page page) {
             if (fieldBlock.asVector() != null) {
                 return BooleanBlock.newConstantBlockWith(false, page.getPositionCount(), driverContext.blockFactory());
             }
-            try (
-                BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(
-                    page.getPositionCount(),
-                    driverContext.blockFactory()
-                )
-            ) {
+            try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) {
                 for (int p = 0; p < page.getPositionCount(); p++) {
                     builder.appendBoolean(fieldBlock.isNull(p));
                 }
@@ -323,12 +318,7 @@ public Block eval(Page page) {
             if (fieldBlock.asVector() != null) {
                 return BooleanBlock.newConstantBlockWith(true, page.getPositionCount(), driverContext.blockFactory());
             }
-            try (
-                BooleanVector.FixedBuilder builder = BooleanVector.newVectorFixedBuilder(
-                    page.getPositionCount(),
-                    driverContext.blockFactory()
-                )
-            ) {
+            try (var builder = driverContext.blockFactory().newBooleanVectorFixedBuilder(page.getPositionCount())) {
                 for (int p = 0; p < page.getPositionCount(); p++) {
                     builder.appendBoolean(fieldBlock.isNull(p) == false);
                 }
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java
index 6ef37abf5a9b4..cc90c76723d9c 100644
--- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java
+++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/evaluator/predicate/operator/comparison/InMapper.java
@@ -8,8 +8,7 @@
 package org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison;
 import org.elasticsearch.compute.data.Block;
-import org.elasticsearch.compute.data.BooleanArrayBlock;
-import org.elasticsearch.compute.data.BooleanArrayVector;
+import org.elasticsearch.compute.data.BlockFactory;
 import org.elasticsearch.compute.data.BooleanBlock;
 import org.elasticsearch.compute.data.BooleanVector;
 import org.elasticsearch.compute.data.Page;
@@ -41,10 +40,12 @@ public ExpressionEvaluator.Factory map(In in, Layout layout) {
             ExpressionEvaluator.Factory eqEvaluator = ((ExpressionMapper) EQUALS).map(eq, layout);
             listEvaluators.add(eqEvaluator);
         });
-        return dvrCtx -> new InExpressionEvaluator(listEvaluators.stream().map(fac -> fac.get(dvrCtx)).toList());
+        return dvrCtx -> new InExpressionEvaluator(dvrCtx.blockFactory(), listEvaluators.stream().map(fac -> fac.get(dvrCtx)).toList());
     }
 
-    record InExpressionEvaluator(List<EvalOperator.ExpressionEvaluator> listEvaluators) implements EvalOperator.ExpressionEvaluator {
+    record InExpressionEvaluator(BlockFactory blockFactory, List<EvalOperator.ExpressionEvaluator> listEvaluators)
+        implements
+            EvalOperator.ExpressionEvaluator {
         @Override
         public Block eval(Page page) {
             int positionCount = page.getPositionCount();
@@ -68,7 +69,7 @@ public Block eval(Page page) {
                 }
             }
 
-            return evalWithNulls(values, nulls, nullInValues);
+            return evalWithNulls(blockFactory(), values, nulls, nullInValues);
         }
 
         private static void updateValues(BooleanVector vector, boolean[] values) {
@@ -94,9 +95,9 @@ private static void updateValues(BooleanBlock block, boolean[] values, BitSet nu
             }
         }
 
-        private static Block evalWithNulls(boolean[] values, BitSet nulls, boolean nullInValues) {
+        private static Block evalWithNulls(BlockFactory blockFactory, boolean[] values, BitSet nulls, boolean nullInValues) {
             if (nulls.isEmpty() && nullInValues == false) {
-                return new BooleanArrayVector(values, values.length).asBlock();
+                return blockFactory.newBooleanArrayVector(values, values.length).asBlock();
             } else {
                 // 3VL: true trumps null; null trumps false.
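+                // For example (an illustrative sketch): with values = [true, false],
+                // nulls = {0, 1} and nullInValues == false, position 0 folds to true
+                // (true trumps null) while position 1 stays null (null trumps false).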
for (int i = 0; i < values.length; i++) { @@ -108,9 +109,9 @@ private static Block evalWithNulls(boolean[] values, BitSet nulls, boolean nullI } if (nulls.isEmpty()) { // no nulls and no multi-values means we must use a Vector - return new BooleanArrayVector(values, values.length).asBlock(); + return blockFactory.newBooleanArrayVector(values, values.length).asBlock(); } else { - return new BooleanArrayBlock(values, values.length, null, nulls, Block.MvOrdering.UNORDERED); + return blockFactory.newBooleanArrayBlock(values, values.length, null, nulls, Block.MvOrdering.UNORDERED); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java index bb384ae846f26..33bd3098f2e3b 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPoint.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -34,7 +36,11 @@ public class ToCartesianPoint extends AbstractConvertFunction { Map.entry(TEXT, ToCartesianPointFromStringEvaluator.Factory::new) ); - public ToCartesianPoint(Source source, Expression field) { + @FunctionInfo(returnType = "cartesian_point") + public ToCartesianPoint( + Source source, + @Param(name = "v", type = { "cartesian_point", "long", "unsigned_long", "keyword", "text" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java index 75ef5c324541b..c78597706de45 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPoint.java @@ -9,6 +9,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.ann.ConvertEvaluator; +import org.elasticsearch.xpack.esql.expression.function.FunctionInfo; +import org.elasticsearch.xpack.esql.expression.function.Param; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.tree.NodeInfo; import org.elasticsearch.xpack.ql.tree.Source; @@ -34,7 +36,11 @@ public class ToGeoPoint extends AbstractConvertFunction { Map.entry(TEXT, ToGeoPointFromStringEvaluator.Factory::new) ); - public ToGeoPoint(Source source, Expression field) { + @FunctionInfo(returnType = "geo_point") + public ToGeoPoint( + Source source, + @Param(name = "v", type = { "geo_point", "long", "unsigned_long", "keyword", "text" }) Expression field + ) { super(source, field); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java 
index bf05aeee4d228..9996e95993904 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvCount.java @@ -9,7 +9,6 @@ import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.IntBlock; -import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.compute.operator.EvalOperator.ExpressionEvaluator; @@ -123,7 +122,7 @@ protected Block evalNullable(Block block) { @Override protected Block evalNotNullable(Block block) { - try (var builder = IntVector.newVectorFixedBuilder(block.getPositionCount(), driverContext.blockFactory())) { + try (var builder = driverContext.blockFactory().newIntVectorFixedBuilder(block.getPositionCount())) { for (int p = 0; p < block.getPositionCount(); p++) { builder.appendInt(block.getValueCount(p)); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java index a0abced909c48..48b83aa205549 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/scalar/multivalue/MvSum.java @@ -45,7 +45,7 @@ protected ExpressionEvaluator.Factory evaluator(ExpressionEvaluator.Factory fiel case LONG -> field().dataType() == DataTypes.UNSIGNED_LONG ? new MvSumUnsignedLongEvaluator.Factory(source(), fieldEval) : new MvSumLongEvaluator.Factory(source(), fieldEval); - case NULL -> dvrCtx -> EvalOperator.CONSTANT_NULL; + case NULL -> EvalOperator.CONSTANT_NULL_FACTORY; default -> throw EsqlIllegalArgumentException.illegalDataType(field.dataType()); }; diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java index 3451a3981d3e3..1f833a9254ab8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizer.java @@ -7,13 +7,24 @@ package org.elasticsearch.xpack.esql.optimizer; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockUtils; +import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; +import org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.PropagateEmptyRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.TopN; +import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.stats.SearchStats; import org.elasticsearch.xpack.ql.expression.Alias; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import 
org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.NamedExpression; +import org.elasticsearch.xpack.ql.expression.function.aggregate.AggregateFunction; import org.elasticsearch.xpack.ql.optimizer.OptimizerRules; import org.elasticsearch.xpack.ql.plan.logical.Aggregate; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; @@ -23,10 +34,15 @@ import org.elasticsearch.xpack.ql.plan.logical.Project; import org.elasticsearch.xpack.ql.rule.ParameterizedRule; import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; +import org.elasticsearch.xpack.ql.type.DataType; +import org.elasticsearch.xpack.ql.type.DataTypes; import java.util.ArrayList; import java.util.List; +import static java.util.Arrays.asList; +import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.cleanup; +import static org.elasticsearch.xpack.esql.optimizer.LogicalPlanOptimizer.operators; import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP; public class LocalLogicalPlanOptimizer extends ParameterizedRuleExecutor { @@ -37,15 +53,34 @@ public LocalLogicalPlanOptimizer(LocalLogicalOptimizerContext localLogicalOptimi @Override protected List> batches() { - var local = new Batch<>("Local rewrite", new ReplaceTopNWithLimitAndSort(), new ReplaceMissingFieldWithNull()); + var local = new Batch<>( + "Local rewrite", + Limiter.ONCE, + new ReplaceTopNWithLimitAndSort(), + new ReplaceMissingFieldWithNull(), + new InferIsNotNull() + ); var rules = new ArrayList>(); rules.add(local); // TODO: if the local rules haven't touched the tree, the rest of the rules can be skipped - rules.addAll(LogicalPlanOptimizer.rules()); + rules.addAll(asList(operators(), cleanup())); + replaceRules(rules); return rules; } + private List> replaceRules(List> listOfRules) { + for (Batch batch : listOfRules) { + var rules = batch.rules(); + for (int i = 0; i < rules.length; i++) { + if (rules[i] instanceof PropagateEmptyRelation) { + rules[i] = new LocalPropagateEmptyRelation(); + } + } + } + return listOfRules; + } + public LogicalPlan localOptimize(LogicalPlan plan) { return execute(plan); } @@ -116,6 +151,40 @@ else if (plan instanceof Project project) { } } + static class InferIsNotNull extends OptimizerRules.InferIsNotNull { + + @Override + protected boolean skipExpression(Expression e) { + return e instanceof Coalesce; + } + } + + /** + * Local aggregation can only produce intermediate state that get wired into the global agg. + */ + private static class LocalPropagateEmptyRelation extends PropagateEmptyRelation { + + /** + * Local variant of the aggregation that returns the intermediate value. + */ + @Override + protected void aggOutput(NamedExpression agg, AggregateFunction aggFunc, BlockFactory blockFactory, List blocks) { + List output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of()); + for (Attribute o : output) { + DataType dataType = o.dataType(); + // boolean right now is used for the internal #seen so always return true + var value = dataType == DataTypes.BOOLEAN ? true + // look for count(literal) with literal != null + : aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 
0L + // otherwise nullify + : null; + var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); + wrapper.accept(value); + blocks.add(wrapper.builder().build()); + } + } + } + abstract static class ParameterizedOptimizerRule extends ParameterizedRule { public final LogicalPlan apply(LogicalPlan plan, P context) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java index 55ead7aa3fe4e..3744adbc0bfaf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizer.java @@ -10,7 +10,6 @@ import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; -import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.NotEquals; import org.elasticsearch.xpack.esql.expression.function.aggregate.Count; @@ -29,9 +28,9 @@ import org.elasticsearch.xpack.esql.plan.physical.TopNExec; import org.elasticsearch.xpack.esql.plan.physical.UnaryExec; import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; +import org.elasticsearch.xpack.esql.planner.EsqlTranslatorHandler; import org.elasticsearch.xpack.esql.planner.PhysicalVerificationException; import org.elasticsearch.xpack.esql.planner.PhysicalVerifier; -import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.ql.common.Failure; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.Attribute; @@ -43,15 +42,15 @@ import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; import org.elasticsearch.xpack.ql.expression.TypedAttribute; -import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.function.scalar.UnaryScalarFunction; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.expression.predicate.logical.BinaryLogic; import org.elasticsearch.xpack.ql.expression.predicate.logical.Not; +import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNull; import org.elasticsearch.xpack.ql.expression.predicate.operator.comparison.BinaryComparison; import org.elasticsearch.xpack.ql.expression.predicate.regex.RegexMatch; import org.elasticsearch.xpack.ql.expression.predicate.regex.WildcardLike; -import org.elasticsearch.xpack.ql.planner.ExpressionTranslator; -import org.elasticsearch.xpack.ql.planner.QlTranslatorHandler; import org.elasticsearch.xpack.ql.querydsl.query.Query; import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.ql.rule.Rule; @@ -65,7 +64,6 @@ import java.util.LinkedList; import java.util.List; import java.util.Set; -import java.util.function.Supplier; import java.util.stream.Collectors; import static java.util.Arrays.asList; @@ -76,7 +74,7 @@ import static org.elasticsearch.xpack.ql.optimizer.OptimizerRules.TransformDirection.UP; public class LocalPhysicalPlanOptimizer 
extends ParameterizedRuleExecutor { - public static final QlTranslatorHandler TRANSLATOR_HANDLER = new EsqlTranslatorHandler(); + public static final EsqlTranslatorHandler TRANSLATOR_HANDLER = new EsqlTranslatorHandler(); private final PhysicalVerifier verifier = new PhysicalVerifier(); @@ -206,7 +204,8 @@ protected PhysicalPlan rule(FilterExec filterExec) { (canPushToSource(exp) ? pushable : nonPushable).add(exp); } if (pushable.size() > 0) { // update the executable with pushable conditions - QueryBuilder planQuery = TRANSLATOR_HANDLER.asQuery(Predicates.combineAnd(pushable)).asBuilder(); + Query queryDSL = TRANSLATOR_HANDLER.asQuery(Predicates.combineAnd(pushable)); + QueryBuilder planQuery = queryDSL.asBuilder(); var query = Queries.combine(Clause.FILTER, asList(queryExec.query(), planQuery)); queryExec = new EsQueryExec( queryExec.source(), @@ -233,17 +232,19 @@ public static boolean canPushToSource(Expression exp) { return isAttributePushable(bc.left(), bc) && bc.right().foldable(); } else if (exp instanceof BinaryLogic bl) { return canPushToSource(bl.left()) && canPushToSource(bl.right()); - } else if (exp instanceof RegexMatch rm) { - return isAttributePushable(rm.field(), rm); } else if (exp instanceof In in) { return isAttributePushable(in.value(), null) && Expressions.foldable(in.list()); } else if (exp instanceof Not not) { return canPushToSource(not.field()); + } else if (exp instanceof UnaryScalarFunction usf) { + if (usf instanceof RegexMatch || usf instanceof IsNull || usf instanceof IsNotNull) { + return isAttributePushable(usf.field(), usf); + } } return false; } - private static boolean isAttributePushable(Expression expression, ScalarFunction operation) { + private static boolean isAttributePushable(Expression expression, Expression operation) { if (expression instanceof FieldAttribute f && f.getExactInfo().hasExact()) { return isAggregatable(f); } @@ -404,22 +405,4 @@ private Tuple, List> pushableStats(AggregateExec aggregate } } - private static final class EsqlTranslatorHandler extends QlTranslatorHandler { - @Override - public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { - if (field instanceof FieldAttribute fa) { - if (fa.getExactInfo().hasExact()) { - var exact = fa.exactAttribute(); - if (exact != fa) { - fa = exact; - } - } - return ExpressionTranslator.wrapIfNested(new SingleValueQuery(querySupplier.get(), fa.name()), field); - } - if (field instanceof MetadataAttribute) { - return querySupplier.get(); // MetadataAttributes are always single valued - } - throw new EsqlIllegalArgumentException("Expected a FieldAttribute or MetadataAttribute but received [" + field + "]"); - } - } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java index 66654b78c3af4..e4f67838731a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizer.java @@ -25,7 +25,6 @@ import org.elasticsearch.xpack.esql.plan.logical.local.EsqlProject; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; -import org.elasticsearch.xpack.esql.planner.AbstractPhysicalOperationProviders; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import 
org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.expression.Alias; @@ -63,7 +62,6 @@ import org.elasticsearch.xpack.ql.rule.ParameterizedRuleExecutor; import org.elasticsearch.xpack.ql.rule.Rule; import org.elasticsearch.xpack.ql.tree.Source; -import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.CollectionUtils; import org.elasticsearch.xpack.ql.util.Holder; @@ -101,17 +99,8 @@ protected List> batches() { return rules(); } - protected static List> rules() { - var substitutions = new Batch<>( - "Substitutions", - Limiter.ONCE, - new SubstituteSurrogates(), - new ReplaceRegexMatch(), - new ReplaceAliasingEvalWithProject() - // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 - ); - - var operators = new Batch<>( + protected static Batch operators() { + return new Batch<>( "Operator Optimization", new CombineProjections(), new CombineEvals(), @@ -146,19 +135,33 @@ protected static List> rules() { new PruneOrderByBeforeStats(), new PruneRedundantSortClauses() ); + } - var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); - var cleanup = new Batch<>( + protected static Batch cleanup() { + return new Batch<>( "Clean Up", new ReplaceDuplicateAggWithEval(), // pushing down limits again, because ReplaceDuplicateAggWithEval could create new Project nodes that can still be optimized new PushDownAndCombineLimits(), new ReplaceLimitAndSortAsTopN() ); + } + + protected static List> rules() { + var substitutions = new Batch<>( + "Substitutions", + Limiter.ONCE, + new SubstituteSurrogates(), + new ReplaceRegexMatch(), + new ReplaceAliasingEvalWithProject() + // new NormalizeAggregate(), - waits on https://github.com/elastic/elasticsearch/issues/100634 + ); + + var skip = new Batch<>("Skip Compute", new SkipQueryOnLimitZero()); var defaultTopN = new Batch<>("Add default TopN", new AddDefaultTopN()); var label = new Batch<>("Set as Optimized", Limiter.ONCE, new SetAsOptimized()); - return asList(substitutions, operators, skip, cleanup, defaultTopN, label); + return asList(substitutions, operators(), skip, cleanup(), defaultTopN, label); } // TODO: currently this rule only works for aggregate functions (AVG) @@ -633,6 +636,7 @@ protected LogicalPlan rule(UnaryPlan plan) { } } + @SuppressWarnings("removal") static class PropagateEmptyRelation extends OptimizerRules.OptimizerRule { @Override @@ -650,29 +654,14 @@ protected LogicalPlan rule(UnaryPlan plan) { return p; } - private static List aggsFromEmpty(List aggs) { - // TODO: Should we introduce skip operator that just never queries the source + private List aggsFromEmpty(List aggs) { List blocks = new ArrayList<>(); - var blockFactory = BlockFactory.getNonBreakingInstance(); + var blockFactory = PlannerUtils.NON_BREAKING_BLOCK_FACTORY; int i = 0; for (var agg : aggs) { // there needs to be an alias if (agg instanceof Alias a && a.child() instanceof AggregateFunction aggFunc) { - List output = AbstractPhysicalOperationProviders.intermediateAttributes(List.of(agg), List.of()); - for (Attribute o : output) { - DataType dataType = o.dataType(); - // fill the boolean block later in LocalExecutionPlanner - if (dataType != DataTypes.BOOLEAN) { - // look for count(literal) with literal != null - var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(dataType), 1); - if (aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null)) { - 
wrapper.accept(0L); - } else { - wrapper.accept(null); - } - blocks.add(wrapper.builder().build()); - } - } + aggOutput(agg, aggFunc, blockFactory, blocks); } else { throw new EsqlIllegalArgumentException("Did not expect a non-aliased aggregation {}", agg); } @@ -680,6 +669,16 @@ private static List aggsFromEmpty(List aggs) { return blocks; } + /** + * The folded aggregation output - this variant is for the coordinator/final. + */ + protected void aggOutput(NamedExpression agg, AggregateFunction aggFunc, BlockFactory blockFactory, List blocks) { + // look for count(literal) with literal != null + Object value = aggFunc instanceof Count count && (count.foldable() == false || count.fold() != null) ? 0L : null; + var wrapper = BlockUtils.wrapperFor(blockFactory, PlannerUtils.toElementType(aggFunc.dataType()), 1); + wrapper.accept(value); + blocks.add(wrapper.builder().build()); + } } private static LogicalPlan skipPlan(UnaryPlan plan) { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp index 12542878c3ed3..585f722065e6f 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.interp @@ -25,15 +25,15 @@ null null null null -null +'|' null null null 'by' 'and' 'asc' -null -null +'=' +',' 'desc' '.' 'false' @@ -51,8 +51,6 @@ null 'rlike' ')' 'true' -'info' -'functions' '==' '!=' '<' @@ -71,8 +69,19 @@ null null null null -'as' 'metadata' +null +null +null +null +null +null +null +null +'as' +null +null +null 'on' 'with' null @@ -81,6 +90,14 @@ null null null null +null +null +null +'info' +'functions' +null +null +null token symbolic names: null @@ -135,8 +152,6 @@ PARAM RLIKE RP TRUE -INFO -FUNCTIONS EQ NEQ LT @@ -155,16 +170,35 @@ QUOTED_IDENTIFIER EXPR_LINE_COMMENT EXPR_MULTILINE_COMMENT EXPR_WS -AS METADATA +FROM_UNQUOTED_IDENTIFIER +FROM_LINE_COMMENT +FROM_MULTILINE_COMMENT +FROM_WS +PROJECT_UNQUOTED_IDENTIFIER +PROJECT_LINE_COMMENT +PROJECT_MULTILINE_COMMENT +PROJECT_WS +AS +RENAME_LINE_COMMENT +RENAME_MULTILINE_COMMENT +RENAME_WS ON WITH -SRC_UNQUOTED_IDENTIFIER -SRC_QUOTED_IDENTIFIER -SRC_LINE_COMMENT -SRC_MULTILINE_COMMENT -SRC_WS -EXPLAIN_PIPE +ENRICH_LINE_COMMENT +ENRICH_MULTILINE_COMMENT +ENRICH_WS +ENRICH_FIELD_LINE_COMMENT +ENRICH_FIELD_MULTILINE_COMMENT +ENRICH_FIELD_WS +MVEXPAND_LINE_COMMENT +MVEXPAND_MULTILINE_COMMENT +MVEXPAND_WS +INFO +FUNCTIONS +SHOW_LINE_COMMENT +SHOW_MULTILINE_COMMENT +SHOW_WS rule names: DISSECT @@ -200,6 +234,11 @@ LETTER ESCAPE_SEQUENCE UNESCAPED_CHARS EXPONENT +ASPERAND +BACKQUOTE +BACKQUOTE_BLOCK +UNDERSCORE +UNQUOTED_ID_BODY STRING INTEGER_LITERAL DECIMAL_LITERAL @@ -225,8 +264,6 @@ PARAM RLIKE RP TRUE -INFO -FUNCTIONS EQ NEQ LT @@ -245,21 +282,68 @@ QUOTED_IDENTIFIER EXPR_LINE_COMMENT EXPR_MULTILINE_COMMENT EXPR_WS -SRC_PIPE -SRC_OPENING_BRACKET -SRC_CLOSING_BRACKET -SRC_COMMA -SRC_ASSIGN -AS +FROM_PIPE +FROM_OPENING_BRACKET +FROM_CLOSING_BRACKET +FROM_COMMA +FROM_ASSIGN METADATA +FROM_UNQUOTED_IDENTIFIER_PART +FROM_UNQUOTED_IDENTIFIER +FROM_QUOTED_IDENTIFIER +FROM_LINE_COMMENT +FROM_MULTILINE_COMMENT +FROM_WS +PROJECT_PIPE +PROJECT_DOT +PROJECT_COMMA +UNQUOTED_ID_BODY_WITH_PATTERN +PROJECT_UNQUOTED_IDENTIFIER +PROJECT_QUOTED_IDENTIFIER +PROJECT_LINE_COMMENT +PROJECT_MULTILINE_COMMENT +PROJECT_WS +RENAME_PIPE +RENAME_ASSIGN +RENAME_COMMA +RENAME_DOT +AS 
+RENAME_QUOTED_IDENTIFIER +RENAME_UNQUOTED_IDENTIFIER +RENAME_LINE_COMMENT +RENAME_MULTILINE_COMMENT +RENAME_WS +ENRICH_PIPE ON WITH -SRC_UNQUOTED_IDENTIFIER -SRC_UNQUOTED_IDENTIFIER_PART -SRC_QUOTED_IDENTIFIER -SRC_LINE_COMMENT -SRC_MULTILINE_COMMENT -SRC_WS +ENRICH_POLICY_UNQUOTED_IDENTIFIER +ENRICH_QUOTED_IDENTIFIER +ENRICH_LINE_COMMENT +ENRICH_MULTILINE_COMMENT +ENRICH_WS +ENRICH_FIELD_PIPE +ENRICH_FIELD_ASSIGN +ENRICH_FIELD_COMMA +ENRICH_FIELD_DOT +ENRICH_FIELD_WITH +ENRICH_FIELD_UNQUOTED_IDENTIFIER +ENRICH_FIELD_QUOTED_IDENTIFIER +ENRICH_FIELD_LINE_COMMENT +ENRICH_FIELD_MULTILINE_COMMENT +ENRICH_FIELD_WS +MVEXPAND_PIPE +MVEXPAND_DOT +MVEXPAND_QUOTED_IDENTIFIER +MVEXPAND_UNQUOTED_IDENTIFIER +MVEXPAND_LINE_COMMENT +MVEXPAND_MULTILINE_COMMENT +MVEXPAND_WS +SHOW_PIPE +INFO +FUNCTIONS +SHOW_LINE_COMMENT +SHOW_MULTILINE_COMMENT +SHOW_WS channel names: DEFAULT_TOKEN_CHANNEL @@ -268,8 +352,14 @@ HIDDEN mode names: DEFAULT_MODE EXPLAIN_MODE -EXPRESSION -SOURCE_IDENTIFIERS +EXPRESSION_MODE +FROM_MODE +PROJECT_MODE +RENAME_MODE +ENRICH_MODE +ENRICH_FIELD_MODE +MVEXPAND_MODE +SHOW_MODE atn: -[4, 0, 81, 764, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 345, 8, 18, 11, 18, 12, 18, 346, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 355, 8, 19, 10, 19, 12, 19, 358, 9, 19, 1, 
19, 3, 19, 361, 8, 19, 1, 19, 3, 19, 364, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 373, 8, 20, 10, 20, 12, 20, 376, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 384, 8, 21, 11, 21, 12, 21, 385, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 427, 8, 32, 1, 32, 4, 32, 430, 8, 32, 11, 32, 12, 32, 431, 1, 33, 1, 33, 1, 33, 5, 33, 437, 8, 33, 10, 33, 12, 33, 440, 9, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 5, 33, 448, 8, 33, 10, 33, 12, 33, 451, 9, 33, 1, 33, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 458, 8, 33, 1, 33, 3, 33, 461, 8, 33, 3, 33, 463, 8, 33, 1, 34, 4, 34, 466, 8, 34, 11, 34, 12, 34, 467, 1, 35, 4, 35, 471, 8, 35, 11, 35, 12, 35, 472, 1, 35, 1, 35, 5, 35, 477, 8, 35, 10, 35, 12, 35, 480, 9, 35, 1, 35, 1, 35, 4, 35, 484, 8, 35, 11, 35, 12, 35, 485, 1, 35, 4, 35, 489, 8, 35, 11, 35, 12, 35, 490, 1, 35, 1, 35, 5, 35, 495, 8, 35, 10, 35, 12, 35, 498, 9, 35, 3, 35, 500, 8, 35, 1, 35, 1, 35, 1, 35, 1, 35, 4, 35, 506, 8, 35, 11, 35, 12, 35, 507, 1, 35, 1, 35, 3, 35, 512, 8, 35, 1, 36, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 38, 1, 38, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 47, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 51, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 61, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 65, 1, 65, 1, 65, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 71, 1, 71, 1, 71, 1, 72, 1, 72, 1, 72, 1, 72, 1, 72, 1, 73, 1, 73, 1, 73, 1, 73, 5, 73, 654, 8, 73, 10, 73, 12, 73, 657, 9, 73, 1, 73, 1, 73, 1, 73, 1, 73, 4, 73, 663, 8, 73, 11, 73, 12, 73, 664, 3, 73, 667, 8, 73, 1, 74, 1, 74, 1, 74, 1, 74, 5, 74, 673, 8, 74, 10, 74, 12, 74, 676, 9, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 1, 76, 1, 76, 1, 77, 1, 77, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 4, 87, 738, 8, 87, 11, 87, 12, 87, 739, 1, 88, 4, 88, 743, 8, 88, 11, 88, 12, 88, 744, 1, 88, 1, 88, 3, 88, 749, 8, 88, 1, 89, 1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 2, 374, 449, 0, 93, 4, 1, 6, 2, 8, 3, 10, 4, 12, 5, 14, 6, 16, 7, 18, 8, 20, 9, 22, 10, 24, 11, 26, 12, 28, 13, 30, 14, 32, 15, 34, 16, 36, 17, 38, 18, 40, 19, 42, 20, 44, 21, 46, 22, 48, 0, 50, 81, 52, 23, 54, 24, 56, 25, 58, 26, 60, 0, 62, 0, 64, 0, 66, 0, 68, 0, 70, 27, 72, 28, 74, 29, 76, 30, 78, 31, 80, 32, 82, 33, 84, 34, 86, 35, 88, 36, 90, 37, 92, 38, 94, 39, 96, 40, 98, 41, 100, 42, 102, 43, 104, 44, 106, 45, 108, 46, 110, 47, 112, 48, 114, 49, 116, 50, 
118, 51, 120, 52, 122, 53, 124, 54, 126, 55, 128, 56, 130, 57, 132, 58, 134, 59, 136, 60, 138, 61, 140, 62, 142, 63, 144, 64, 146, 65, 148, 66, 150, 67, 152, 68, 154, 69, 156, 70, 158, 71, 160, 0, 162, 0, 164, 0, 166, 0, 168, 0, 170, 72, 172, 73, 174, 74, 176, 75, 178, 76, 180, 0, 182, 77, 184, 78, 186, 79, 188, 80, 4, 0, 1, 2, 3, 13, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 2, 0, 64, 64, 95, 95, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 792, 0, 4, 1, 0, 0, 0, 0, 6, 1, 0, 0, 0, 0, 8, 1, 0, 0, 0, 0, 10, 1, 0, 0, 0, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 1, 48, 1, 0, 0, 0, 1, 50, 1, 0, 0, 0, 1, 52, 1, 0, 0, 0, 1, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 2, 58, 1, 0, 0, 0, 2, 70, 1, 0, 0, 0, 2, 72, 1, 0, 0, 0, 2, 74, 1, 0, 0, 0, 2, 76, 1, 0, 0, 0, 2, 78, 1, 0, 0, 0, 2, 80, 1, 0, 0, 0, 2, 82, 1, 0, 0, 0, 2, 84, 1, 0, 0, 0, 2, 86, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 3, 160, 1, 0, 0, 0, 3, 162, 1, 0, 0, 0, 3, 164, 1, 0, 0, 0, 3, 166, 1, 0, 0, 0, 3, 168, 1, 0, 0, 0, 3, 170, 1, 0, 0, 0, 3, 172, 1, 0, 0, 0, 3, 174, 1, 0, 0, 0, 3, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 184, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 4, 190, 1, 0, 0, 0, 6, 200, 1, 0, 0, 0, 8, 207, 1, 0, 0, 0, 10, 216, 1, 0, 0, 0, 12, 223, 1, 0, 0, 0, 14, 233, 1, 0, 0, 0, 16, 240, 1, 0, 0, 0, 18, 247, 1, 0, 0, 0, 20, 261, 1, 0, 0, 0, 22, 268, 1, 0, 0, 0, 24, 276, 1, 0, 0, 0, 26, 288, 1, 0, 0, 0, 28, 298, 1, 0, 0, 0, 30, 307, 1, 0, 0, 0, 32, 313, 1, 0, 0, 0, 34, 320, 1, 0, 0, 0, 36, 327, 1, 0, 0, 0, 38, 335, 1, 0, 0, 0, 40, 344, 1, 0, 0, 0, 42, 350, 1, 0, 0, 0, 44, 367, 1, 0, 0, 0, 46, 383, 1, 0, 0, 0, 48, 389, 1, 0, 0, 0, 50, 394, 1, 0, 0, 0, 52, 399, 1, 0, 0, 0, 54, 403, 1, 0, 0, 0, 56, 407, 1, 0, 0, 0, 58, 411, 1, 0, 0, 0, 60, 415, 1, 0, 0, 0, 62, 417, 1, 0, 0, 0, 64, 419, 1, 0, 0, 0, 66, 422, 1, 0, 0, 0, 68, 424, 1, 0, 0, 0, 70, 462, 1, 0, 0, 0, 72, 465, 1, 0, 0, 0, 74, 511, 1, 0, 0, 0, 76, 513, 1, 0, 0, 0, 78, 516, 1, 0, 0, 0, 80, 520, 1, 0, 0, 0, 82, 524, 1, 0, 0, 0, 84, 526, 1, 0, 0, 0, 86, 528, 1, 0, 0, 0, 88, 533, 1, 0, 0, 0, 90, 535, 1, 0, 0, 0, 92, 541, 1, 0, 0, 0, 94, 547, 1, 0, 0, 0, 96, 552, 1, 0, 0, 0, 98, 554, 1, 0, 0, 0, 100, 557, 1, 0, 0, 0, 102, 560, 1, 0, 0, 0, 104, 565, 1, 0, 0, 0, 106, 569, 1, 0, 0, 0, 
108, 574, 1, 0, 0, 0, 110, 580, 1, 0, 0, 0, 112, 583, 1, 0, 0, 0, 114, 585, 1, 0, 0, 0, 116, 591, 1, 0, 0, 0, 118, 593, 1, 0, 0, 0, 120, 598, 1, 0, 0, 0, 122, 603, 1, 0, 0, 0, 124, 613, 1, 0, 0, 0, 126, 616, 1, 0, 0, 0, 128, 619, 1, 0, 0, 0, 130, 621, 1, 0, 0, 0, 132, 624, 1, 0, 0, 0, 134, 626, 1, 0, 0, 0, 136, 629, 1, 0, 0, 0, 138, 631, 1, 0, 0, 0, 140, 633, 1, 0, 0, 0, 142, 635, 1, 0, 0, 0, 144, 637, 1, 0, 0, 0, 146, 639, 1, 0, 0, 0, 148, 644, 1, 0, 0, 0, 150, 666, 1, 0, 0, 0, 152, 668, 1, 0, 0, 0, 154, 679, 1, 0, 0, 0, 156, 683, 1, 0, 0, 0, 158, 687, 1, 0, 0, 0, 160, 691, 1, 0, 0, 0, 162, 696, 1, 0, 0, 0, 164, 702, 1, 0, 0, 0, 166, 708, 1, 0, 0, 0, 168, 712, 1, 0, 0, 0, 170, 716, 1, 0, 0, 0, 172, 719, 1, 0, 0, 0, 174, 728, 1, 0, 0, 0, 176, 731, 1, 0, 0, 0, 178, 737, 1, 0, 0, 0, 180, 748, 1, 0, 0, 0, 182, 750, 1, 0, 0, 0, 184, 752, 1, 0, 0, 0, 186, 756, 1, 0, 0, 0, 188, 760, 1, 0, 0, 0, 190, 191, 5, 100, 0, 0, 191, 192, 5, 105, 0, 0, 192, 193, 5, 115, 0, 0, 193, 194, 5, 115, 0, 0, 194, 195, 5, 101, 0, 0, 195, 196, 5, 99, 0, 0, 196, 197, 5, 116, 0, 0, 197, 198, 1, 0, 0, 0, 198, 199, 6, 0, 0, 0, 199, 5, 1, 0, 0, 0, 200, 201, 5, 100, 0, 0, 201, 202, 5, 114, 0, 0, 202, 203, 5, 111, 0, 0, 203, 204, 5, 112, 0, 0, 204, 205, 1, 0, 0, 0, 205, 206, 6, 1, 1, 0, 206, 7, 1, 0, 0, 0, 207, 208, 5, 101, 0, 0, 208, 209, 5, 110, 0, 0, 209, 210, 5, 114, 0, 0, 210, 211, 5, 105, 0, 0, 211, 212, 5, 99, 0, 0, 212, 213, 5, 104, 0, 0, 213, 214, 1, 0, 0, 0, 214, 215, 6, 2, 1, 0, 215, 9, 1, 0, 0, 0, 216, 217, 5, 101, 0, 0, 217, 218, 5, 118, 0, 0, 218, 219, 5, 97, 0, 0, 219, 220, 5, 108, 0, 0, 220, 221, 1, 0, 0, 0, 221, 222, 6, 3, 0, 0, 222, 11, 1, 0, 0, 0, 223, 224, 5, 101, 0, 0, 224, 225, 5, 120, 0, 0, 225, 226, 5, 112, 0, 0, 226, 227, 5, 108, 0, 0, 227, 228, 5, 97, 0, 0, 228, 229, 5, 105, 0, 0, 229, 230, 5, 110, 0, 0, 230, 231, 1, 0, 0, 0, 231, 232, 6, 4, 2, 0, 232, 13, 1, 0, 0, 0, 233, 234, 5, 102, 0, 0, 234, 235, 5, 114, 0, 0, 235, 236, 5, 111, 0, 0, 236, 237, 5, 109, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 6, 5, 1, 0, 239, 15, 1, 0, 0, 0, 240, 241, 5, 103, 0, 0, 241, 242, 5, 114, 0, 0, 242, 243, 5, 111, 0, 0, 243, 244, 5, 107, 0, 0, 244, 245, 1, 0, 0, 0, 245, 246, 6, 6, 0, 0, 246, 17, 1, 0, 0, 0, 247, 248, 5, 105, 0, 0, 248, 249, 5, 110, 0, 0, 249, 250, 5, 108, 0, 0, 250, 251, 5, 105, 0, 0, 251, 252, 5, 110, 0, 0, 252, 253, 5, 101, 0, 0, 253, 254, 5, 115, 0, 0, 254, 255, 5, 116, 0, 0, 255, 256, 5, 97, 0, 0, 256, 257, 5, 116, 0, 0, 257, 258, 5, 115, 0, 0, 258, 259, 1, 0, 0, 0, 259, 260, 6, 7, 0, 0, 260, 19, 1, 0, 0, 0, 261, 262, 5, 107, 0, 0, 262, 263, 5, 101, 0, 0, 263, 264, 5, 101, 0, 0, 264, 265, 5, 112, 0, 0, 265, 266, 1, 0, 0, 0, 266, 267, 6, 8, 1, 0, 267, 21, 1, 0, 0, 0, 268, 269, 5, 108, 0, 0, 269, 270, 5, 105, 0, 0, 270, 271, 5, 109, 0, 0, 271, 272, 5, 105, 0, 0, 272, 273, 5, 116, 0, 0, 273, 274, 1, 0, 0, 0, 274, 275, 6, 9, 0, 0, 275, 23, 1, 0, 0, 0, 276, 277, 5, 109, 0, 0, 277, 278, 5, 118, 0, 0, 278, 279, 5, 95, 0, 0, 279, 280, 5, 101, 0, 0, 280, 281, 5, 120, 0, 0, 281, 282, 5, 112, 0, 0, 282, 283, 5, 97, 0, 0, 283, 284, 5, 110, 0, 0, 284, 285, 5, 100, 0, 0, 285, 286, 1, 0, 0, 0, 286, 287, 6, 10, 1, 0, 287, 25, 1, 0, 0, 0, 288, 289, 5, 112, 0, 0, 289, 290, 5, 114, 0, 0, 290, 291, 5, 111, 0, 0, 291, 292, 5, 106, 0, 0, 292, 293, 5, 101, 0, 0, 293, 294, 5, 99, 0, 0, 294, 295, 5, 116, 0, 0, 295, 296, 1, 0, 0, 0, 296, 297, 6, 11, 1, 0, 297, 27, 1, 0, 0, 0, 298, 299, 5, 114, 0, 0, 299, 300, 5, 101, 0, 0, 300, 301, 5, 110, 0, 0, 301, 302, 5, 97, 0, 0, 302, 303, 5, 109, 0, 0, 303, 304, 5, 101, 0, 0, 304, 305, 
1, 0, 0, 0, 305, 306, 6, 12, 1, 0, 306, 29, 1, 0, 0, 0, 307, 308, 5, 114, 0, 0, 308, 309, 5, 111, 0, 0, 309, 310, 5, 119, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 13, 0, 0, 312, 31, 1, 0, 0, 0, 313, 314, 5, 115, 0, 0, 314, 315, 5, 104, 0, 0, 315, 316, 5, 111, 0, 0, 316, 317, 5, 119, 0, 0, 317, 318, 1, 0, 0, 0, 318, 319, 6, 14, 0, 0, 319, 33, 1, 0, 0, 0, 320, 321, 5, 115, 0, 0, 321, 322, 5, 111, 0, 0, 322, 323, 5, 114, 0, 0, 323, 324, 5, 116, 0, 0, 324, 325, 1, 0, 0, 0, 325, 326, 6, 15, 0, 0, 326, 35, 1, 0, 0, 0, 327, 328, 5, 115, 0, 0, 328, 329, 5, 116, 0, 0, 329, 330, 5, 97, 0, 0, 330, 331, 5, 116, 0, 0, 331, 332, 5, 115, 0, 0, 332, 333, 1, 0, 0, 0, 333, 334, 6, 16, 0, 0, 334, 37, 1, 0, 0, 0, 335, 336, 5, 119, 0, 0, 336, 337, 5, 104, 0, 0, 337, 338, 5, 101, 0, 0, 338, 339, 5, 114, 0, 0, 339, 340, 5, 101, 0, 0, 340, 341, 1, 0, 0, 0, 341, 342, 6, 17, 0, 0, 342, 39, 1, 0, 0, 0, 343, 345, 8, 0, 0, 0, 344, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 344, 1, 0, 0, 0, 346, 347, 1, 0, 0, 0, 347, 348, 1, 0, 0, 0, 348, 349, 6, 18, 0, 0, 349, 41, 1, 0, 0, 0, 350, 351, 5, 47, 0, 0, 351, 352, 5, 47, 0, 0, 352, 356, 1, 0, 0, 0, 353, 355, 8, 1, 0, 0, 354, 353, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 360, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 361, 5, 13, 0, 0, 360, 359, 1, 0, 0, 0, 360, 361, 1, 0, 0, 0, 361, 363, 1, 0, 0, 0, 362, 364, 5, 10, 0, 0, 363, 362, 1, 0, 0, 0, 363, 364, 1, 0, 0, 0, 364, 365, 1, 0, 0, 0, 365, 366, 6, 19, 3, 0, 366, 43, 1, 0, 0, 0, 367, 368, 5, 47, 0, 0, 368, 369, 5, 42, 0, 0, 369, 374, 1, 0, 0, 0, 370, 373, 3, 44, 20, 0, 371, 373, 9, 0, 0, 0, 372, 370, 1, 0, 0, 0, 372, 371, 1, 0, 0, 0, 373, 376, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 374, 372, 1, 0, 0, 0, 375, 377, 1, 0, 0, 0, 376, 374, 1, 0, 0, 0, 377, 378, 5, 42, 0, 0, 378, 379, 5, 47, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 20, 3, 0, 381, 45, 1, 0, 0, 0, 382, 384, 7, 2, 0, 0, 383, 382, 1, 0, 0, 0, 384, 385, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 385, 386, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 388, 6, 21, 3, 0, 388, 47, 1, 0, 0, 0, 389, 390, 5, 91, 0, 0, 390, 391, 1, 0, 0, 0, 391, 392, 6, 22, 4, 0, 392, 393, 6, 22, 5, 0, 393, 49, 1, 0, 0, 0, 394, 395, 5, 124, 0, 0, 395, 396, 1, 0, 0, 0, 396, 397, 6, 23, 6, 0, 397, 398, 6, 23, 7, 0, 398, 51, 1, 0, 0, 0, 399, 400, 3, 46, 21, 0, 400, 401, 1, 0, 0, 0, 401, 402, 6, 24, 3, 0, 402, 53, 1, 0, 0, 0, 403, 404, 3, 42, 19, 0, 404, 405, 1, 0, 0, 0, 405, 406, 6, 25, 3, 0, 406, 55, 1, 0, 0, 0, 407, 408, 3, 44, 20, 0, 408, 409, 1, 0, 0, 0, 409, 410, 6, 26, 3, 0, 410, 57, 1, 0, 0, 0, 411, 412, 5, 124, 0, 0, 412, 413, 1, 0, 0, 0, 413, 414, 6, 27, 7, 0, 414, 59, 1, 0, 0, 0, 415, 416, 7, 3, 0, 0, 416, 61, 1, 0, 0, 0, 417, 418, 7, 4, 0, 0, 418, 63, 1, 0, 0, 0, 419, 420, 5, 92, 0, 0, 420, 421, 7, 5, 0, 0, 421, 65, 1, 0, 0, 0, 422, 423, 8, 6, 0, 0, 423, 67, 1, 0, 0, 0, 424, 426, 7, 7, 0, 0, 425, 427, 7, 8, 0, 0, 426, 425, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 429, 1, 0, 0, 0, 428, 430, 3, 60, 28, 0, 429, 428, 1, 0, 0, 0, 430, 431, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 431, 432, 1, 0, 0, 0, 432, 69, 1, 0, 0, 0, 433, 438, 5, 34, 0, 0, 434, 437, 3, 64, 30, 0, 435, 437, 3, 66, 31, 0, 436, 434, 1, 0, 0, 0, 436, 435, 1, 0, 0, 0, 437, 440, 1, 0, 0, 0, 438, 436, 1, 0, 0, 0, 438, 439, 1, 0, 0, 0, 439, 441, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 441, 463, 5, 34, 0, 0, 442, 443, 5, 34, 0, 0, 443, 444, 5, 34, 0, 0, 444, 445, 5, 34, 0, 0, 445, 449, 1, 0, 0, 0, 446, 448, 8, 1, 0, 0, 447, 446, 1, 0, 0, 0, 448, 451, 1, 0, 0, 0, 449, 450, 1, 0, 0, 0, 449, 447, 1, 0, 0, 0, 450, 452, 1, 0, 
0, 0, 451, 449, 1, 0, 0, 0, 452, 453, 5, 34, 0, 0, 453, 454, 5, 34, 0, 0, 454, 455, 5, 34, 0, 0, 455, 457, 1, 0, 0, 0, 456, 458, 5, 34, 0, 0, 457, 456, 1, 0, 0, 0, 457, 458, 1, 0, 0, 0, 458, 460, 1, 0, 0, 0, 459, 461, 5, 34, 0, 0, 460, 459, 1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 463, 1, 0, 0, 0, 462, 433, 1, 0, 0, 0, 462, 442, 1, 0, 0, 0, 463, 71, 1, 0, 0, 0, 464, 466, 3, 60, 28, 0, 465, 464, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 465, 1, 0, 0, 0, 467, 468, 1, 0, 0, 0, 468, 73, 1, 0, 0, 0, 469, 471, 3, 60, 28, 0, 470, 469, 1, 0, 0, 0, 471, 472, 1, 0, 0, 0, 472, 470, 1, 0, 0, 0, 472, 473, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 478, 3, 88, 42, 0, 475, 477, 3, 60, 28, 0, 476, 475, 1, 0, 0, 0, 477, 480, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 479, 1, 0, 0, 0, 479, 512, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 483, 3, 88, 42, 0, 482, 484, 3, 60, 28, 0, 483, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 483, 1, 0, 0, 0, 485, 486, 1, 0, 0, 0, 486, 512, 1, 0, 0, 0, 487, 489, 3, 60, 28, 0, 488, 487, 1, 0, 0, 0, 489, 490, 1, 0, 0, 0, 490, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 499, 1, 0, 0, 0, 492, 496, 3, 88, 42, 0, 493, 495, 3, 60, 28, 0, 494, 493, 1, 0, 0, 0, 495, 498, 1, 0, 0, 0, 496, 494, 1, 0, 0, 0, 496, 497, 1, 0, 0, 0, 497, 500, 1, 0, 0, 0, 498, 496, 1, 0, 0, 0, 499, 492, 1, 0, 0, 0, 499, 500, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 3, 68, 32, 0, 502, 512, 1, 0, 0, 0, 503, 505, 3, 88, 42, 0, 504, 506, 3, 60, 28, 0, 505, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 505, 1, 0, 0, 0, 507, 508, 1, 0, 0, 0, 508, 509, 1, 0, 0, 0, 509, 510, 3, 68, 32, 0, 510, 512, 1, 0, 0, 0, 511, 470, 1, 0, 0, 0, 511, 481, 1, 0, 0, 0, 511, 488, 1, 0, 0, 0, 511, 503, 1, 0, 0, 0, 512, 75, 1, 0, 0, 0, 513, 514, 5, 98, 0, 0, 514, 515, 5, 121, 0, 0, 515, 77, 1, 0, 0, 0, 516, 517, 5, 97, 0, 0, 517, 518, 5, 110, 0, 0, 518, 519, 5, 100, 0, 0, 519, 79, 1, 0, 0, 0, 520, 521, 5, 97, 0, 0, 521, 522, 5, 115, 0, 0, 522, 523, 5, 99, 0, 0, 523, 81, 1, 0, 0, 0, 524, 525, 5, 61, 0, 0, 525, 83, 1, 0, 0, 0, 526, 527, 5, 44, 0, 0, 527, 85, 1, 0, 0, 0, 528, 529, 5, 100, 0, 0, 529, 530, 5, 101, 0, 0, 530, 531, 5, 115, 0, 0, 531, 532, 5, 99, 0, 0, 532, 87, 1, 0, 0, 0, 533, 534, 5, 46, 0, 0, 534, 89, 1, 0, 0, 0, 535, 536, 5, 102, 0, 0, 536, 537, 5, 97, 0, 0, 537, 538, 5, 108, 0, 0, 538, 539, 5, 115, 0, 0, 539, 540, 5, 101, 0, 0, 540, 91, 1, 0, 0, 0, 541, 542, 5, 102, 0, 0, 542, 543, 5, 105, 0, 0, 543, 544, 5, 114, 0, 0, 544, 545, 5, 115, 0, 0, 545, 546, 5, 116, 0, 0, 546, 93, 1, 0, 0, 0, 547, 548, 5, 108, 0, 0, 548, 549, 5, 97, 0, 0, 549, 550, 5, 115, 0, 0, 550, 551, 5, 116, 0, 0, 551, 95, 1, 0, 0, 0, 552, 553, 5, 40, 0, 0, 553, 97, 1, 0, 0, 0, 554, 555, 5, 105, 0, 0, 555, 556, 5, 110, 0, 0, 556, 99, 1, 0, 0, 0, 557, 558, 5, 105, 0, 0, 558, 559, 5, 115, 0, 0, 559, 101, 1, 0, 0, 0, 560, 561, 5, 108, 0, 0, 561, 562, 5, 105, 0, 0, 562, 563, 5, 107, 0, 0, 563, 564, 5, 101, 0, 0, 564, 103, 1, 0, 0, 0, 565, 566, 5, 110, 0, 0, 566, 567, 5, 111, 0, 0, 567, 568, 5, 116, 0, 0, 568, 105, 1, 0, 0, 0, 569, 570, 5, 110, 0, 0, 570, 571, 5, 117, 0, 0, 571, 572, 5, 108, 0, 0, 572, 573, 5, 108, 0, 0, 573, 107, 1, 0, 0, 0, 574, 575, 5, 110, 0, 0, 575, 576, 5, 117, 0, 0, 576, 577, 5, 108, 0, 0, 577, 578, 5, 108, 0, 0, 578, 579, 5, 115, 0, 0, 579, 109, 1, 0, 0, 0, 580, 581, 5, 111, 0, 0, 581, 582, 5, 114, 0, 0, 582, 111, 1, 0, 0, 0, 583, 584, 5, 63, 0, 0, 584, 113, 1, 0, 0, 0, 585, 586, 5, 114, 0, 0, 586, 587, 5, 108, 0, 0, 587, 588, 5, 105, 0, 0, 588, 589, 5, 107, 0, 0, 589, 590, 5, 101, 0, 0, 590, 115, 1, 0, 0, 0, 591, 592, 5, 41, 0, 0, 592, 117, 1, 0, 
0, 0, 593, 594, 5, 116, 0, 0, 594, 595, 5, 114, 0, 0, 595, 596, 5, 117, 0, 0, 596, 597, 5, 101, 0, 0, 597, 119, 1, 0, 0, 0, 598, 599, 5, 105, 0, 0, 599, 600, 5, 110, 0, 0, 600, 601, 5, 102, 0, 0, 601, 602, 5, 111, 0, 0, 602, 121, 1, 0, 0, 0, 603, 604, 5, 102, 0, 0, 604, 605, 5, 117, 0, 0, 605, 606, 5, 110, 0, 0, 606, 607, 5, 99, 0, 0, 607, 608, 5, 116, 0, 0, 608, 609, 5, 105, 0, 0, 609, 610, 5, 111, 0, 0, 610, 611, 5, 110, 0, 0, 611, 612, 5, 115, 0, 0, 612, 123, 1, 0, 0, 0, 613, 614, 5, 61, 0, 0, 614, 615, 5, 61, 0, 0, 615, 125, 1, 0, 0, 0, 616, 617, 5, 33, 0, 0, 617, 618, 5, 61, 0, 0, 618, 127, 1, 0, 0, 0, 619, 620, 5, 60, 0, 0, 620, 129, 1, 0, 0, 0, 621, 622, 5, 60, 0, 0, 622, 623, 5, 61, 0, 0, 623, 131, 1, 0, 0, 0, 624, 625, 5, 62, 0, 0, 625, 133, 1, 0, 0, 0, 626, 627, 5, 62, 0, 0, 627, 628, 5, 61, 0, 0, 628, 135, 1, 0, 0, 0, 629, 630, 5, 43, 0, 0, 630, 137, 1, 0, 0, 0, 631, 632, 5, 45, 0, 0, 632, 139, 1, 0, 0, 0, 633, 634, 5, 42, 0, 0, 634, 141, 1, 0, 0, 0, 635, 636, 5, 47, 0, 0, 636, 143, 1, 0, 0, 0, 637, 638, 5, 37, 0, 0, 638, 145, 1, 0, 0, 0, 639, 640, 5, 91, 0, 0, 640, 641, 1, 0, 0, 0, 641, 642, 6, 71, 0, 0, 642, 643, 6, 71, 0, 0, 643, 147, 1, 0, 0, 0, 644, 645, 5, 93, 0, 0, 645, 646, 1, 0, 0, 0, 646, 647, 6, 72, 7, 0, 647, 648, 6, 72, 7, 0, 648, 149, 1, 0, 0, 0, 649, 655, 3, 62, 29, 0, 650, 654, 3, 62, 29, 0, 651, 654, 3, 60, 28, 0, 652, 654, 5, 95, 0, 0, 653, 650, 1, 0, 0, 0, 653, 651, 1, 0, 0, 0, 653, 652, 1, 0, 0, 0, 654, 657, 1, 0, 0, 0, 655, 653, 1, 0, 0, 0, 655, 656, 1, 0, 0, 0, 656, 667, 1, 0, 0, 0, 657, 655, 1, 0, 0, 0, 658, 662, 7, 9, 0, 0, 659, 663, 3, 62, 29, 0, 660, 663, 3, 60, 28, 0, 661, 663, 5, 95, 0, 0, 662, 659, 1, 0, 0, 0, 662, 660, 1, 0, 0, 0, 662, 661, 1, 0, 0, 0, 663, 664, 1, 0, 0, 0, 664, 662, 1, 0, 0, 0, 664, 665, 1, 0, 0, 0, 665, 667, 1, 0, 0, 0, 666, 649, 1, 0, 0, 0, 666, 658, 1, 0, 0, 0, 667, 151, 1, 0, 0, 0, 668, 674, 5, 96, 0, 0, 669, 673, 8, 10, 0, 0, 670, 671, 5, 96, 0, 0, 671, 673, 5, 96, 0, 0, 672, 669, 1, 0, 0, 0, 672, 670, 1, 0, 0, 0, 673, 676, 1, 0, 0, 0, 674, 672, 1, 0, 0, 0, 674, 675, 1, 0, 0, 0, 675, 677, 1, 0, 0, 0, 676, 674, 1, 0, 0, 0, 677, 678, 5, 96, 0, 0, 678, 153, 1, 0, 0, 0, 679, 680, 3, 42, 19, 0, 680, 681, 1, 0, 0, 0, 681, 682, 6, 75, 3, 0, 682, 155, 1, 0, 0, 0, 683, 684, 3, 44, 20, 0, 684, 685, 1, 0, 0, 0, 685, 686, 6, 76, 3, 0, 686, 157, 1, 0, 0, 0, 687, 688, 3, 46, 21, 0, 688, 689, 1, 0, 0, 0, 689, 690, 6, 77, 3, 0, 690, 159, 1, 0, 0, 0, 691, 692, 5, 124, 0, 0, 692, 693, 1, 0, 0, 0, 693, 694, 6, 78, 6, 0, 694, 695, 6, 78, 7, 0, 695, 161, 1, 0, 0, 0, 696, 697, 5, 91, 0, 0, 697, 698, 1, 0, 0, 0, 698, 699, 6, 79, 4, 0, 699, 700, 6, 79, 1, 0, 700, 701, 6, 79, 1, 0, 701, 163, 1, 0, 0, 0, 702, 703, 5, 93, 0, 0, 703, 704, 1, 0, 0, 0, 704, 705, 6, 80, 7, 0, 705, 706, 6, 80, 7, 0, 706, 707, 6, 80, 8, 0, 707, 165, 1, 0, 0, 0, 708, 709, 5, 44, 0, 0, 709, 710, 1, 0, 0, 0, 710, 711, 6, 81, 9, 0, 711, 167, 1, 0, 0, 0, 712, 713, 5, 61, 0, 0, 713, 714, 1, 0, 0, 0, 714, 715, 6, 82, 10, 0, 715, 169, 1, 0, 0, 0, 716, 717, 5, 97, 0, 0, 717, 718, 5, 115, 0, 0, 718, 171, 1, 0, 0, 0, 719, 720, 5, 109, 0, 0, 720, 721, 5, 101, 0, 0, 721, 722, 5, 116, 0, 0, 722, 723, 5, 97, 0, 0, 723, 724, 5, 100, 0, 0, 724, 725, 5, 97, 0, 0, 725, 726, 5, 116, 0, 0, 726, 727, 5, 97, 0, 0, 727, 173, 1, 0, 0, 0, 728, 729, 5, 111, 0, 0, 729, 730, 5, 110, 0, 0, 730, 175, 1, 0, 0, 0, 731, 732, 5, 119, 0, 0, 732, 733, 5, 105, 0, 0, 733, 734, 5, 116, 0, 0, 734, 735, 5, 104, 0, 0, 735, 177, 1, 0, 0, 0, 736, 738, 3, 180, 88, 0, 737, 736, 1, 0, 0, 0, 738, 739, 1, 0, 0, 0, 739, 737, 
1, 0, 0, 0, 739, 740, 1, 0, 0, 0, 740, 179, 1, 0, 0, 0, 741, 743, 8, 11, 0, 0, 742, 741, 1, 0, 0, 0, 743, 744, 1, 0, 0, 0, 744, 742, 1, 0, 0, 0, 744, 745, 1, 0, 0, 0, 745, 749, 1, 0, 0, 0, 746, 747, 5, 47, 0, 0, 747, 749, 8, 12, 0, 0, 748, 742, 1, 0, 0, 0, 748, 746, 1, 0, 0, 0, 749, 181, 1, 0, 0, 0, 750, 751, 3, 152, 74, 0, 751, 183, 1, 0, 0, 0, 752, 753, 3, 42, 19, 0, 753, 754, 1, 0, 0, 0, 754, 755, 6, 90, 3, 0, 755, 185, 1, 0, 0, 0, 756, 757, 3, 44, 20, 0, 757, 758, 1, 0, 0, 0, 758, 759, 6, 91, 3, 0, 759, 187, 1, 0, 0, 0, 760, 761, 3, 46, 21, 0, 761, 762, 1, 0, 0, 0, 762, 763, 6, 92, 3, 0, 763, 189, 1, 0, 0, 0, 38, 0, 1, 2, 3, 346, 356, 360, 363, 372, 374, 385, 426, 431, 436, 438, 449, 457, 460, 462, 467, 472, 478, 485, 490, 496, 499, 507, 511, 653, 655, 662, 664, 666, 672, 674, 739, 744, 748, 11, 5, 2, 0, 5, 3, 0, 5, 1, 0, 0, 1, 0, 7, 65, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 66, 0, 7, 34, 0, 7, 33, 0] \ No newline at end of file +[4, 0, 98, 1090, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 2, 49, 7, 49, 2, 50, 7, 50, 2, 51, 7, 51, 2, 52, 7, 52, 2, 53, 7, 53, 2, 54, 7, 54, 2, 55, 7, 55, 2, 56, 7, 56, 2, 57, 7, 57, 2, 58, 7, 58, 2, 59, 7, 59, 2, 60, 7, 60, 2, 61, 7, 61, 2, 62, 7, 62, 2, 63, 7, 63, 2, 64, 7, 64, 2, 65, 7, 65, 2, 66, 7, 66, 2, 67, 7, 67, 2, 68, 7, 68, 2, 69, 7, 69, 2, 70, 7, 70, 2, 71, 7, 71, 2, 72, 7, 72, 2, 73, 7, 73, 2, 74, 7, 74, 2, 75, 7, 75, 2, 76, 7, 76, 2, 77, 7, 77, 2, 78, 7, 78, 2, 79, 7, 79, 2, 80, 7, 80, 2, 81, 7, 81, 2, 82, 7, 82, 2, 83, 7, 83, 2, 84, 7, 84, 2, 85, 7, 85, 2, 86, 7, 86, 2, 87, 7, 87, 2, 88, 7, 88, 2, 89, 7, 89, 2, 90, 7, 90, 2, 91, 7, 91, 2, 92, 7, 92, 2, 93, 7, 93, 2, 94, 7, 94, 2, 95, 7, 95, 2, 96, 7, 96, 2, 97, 7, 97, 2, 98, 7, 98, 2, 99, 7, 99, 2, 100, 7, 100, 2, 101, 7, 101, 2, 102, 7, 102, 2, 103, 7, 103, 2, 104, 7, 104, 2, 105, 7, 105, 2, 106, 7, 106, 2, 107, 7, 107, 2, 108, 7, 108, 2, 109, 7, 109, 2, 110, 7, 110, 2, 111, 7, 111, 2, 112, 7, 112, 2, 113, 7, 113, 2, 114, 7, 114, 2, 115, 7, 115, 2, 116, 7, 116, 2, 117, 7, 117, 2, 118, 7, 118, 2, 119, 7, 119, 2, 120, 7, 120, 2, 121, 7, 121, 2, 122, 7, 122, 2, 123, 7, 123, 2, 124, 7, 124, 2, 125, 7, 125, 2, 126, 7, 126, 2, 127, 7, 127, 2, 128, 7, 128, 2, 129, 7, 129, 2, 130, 7, 130, 2, 131, 7, 131, 2, 132, 7, 132, 2, 133, 7, 133, 2, 134, 7, 134, 2, 135, 7, 135, 2, 136, 7, 136, 2, 137, 7, 137, 2, 138, 7, 138, 2, 139, 7, 139, 2, 140, 7, 140, 2, 141, 7, 141, 2, 142, 7, 142, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 
1, 8, 1, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 17, 1, 18, 4, 18, 451, 8, 18, 11, 18, 12, 18, 452, 1, 18, 1, 18, 1, 19, 1, 19, 1, 19, 1, 19, 5, 19, 461, 8, 19, 10, 19, 12, 19, 464, 9, 19, 1, 19, 3, 19, 467, 8, 19, 1, 19, 3, 19, 470, 8, 19, 1, 19, 1, 19, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 5, 20, 479, 8, 20, 10, 20, 12, 20, 482, 9, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 20, 1, 21, 4, 21, 490, 8, 21, 11, 21, 12, 21, 491, 1, 21, 1, 21, 1, 22, 1, 22, 1, 22, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 24, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 26, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 1, 28, 1, 28, 1, 29, 1, 29, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 32, 1, 32, 3, 32, 533, 8, 32, 1, 32, 4, 32, 536, 8, 32, 11, 32, 12, 32, 537, 1, 33, 1, 33, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 3, 35, 547, 8, 35, 1, 36, 1, 36, 1, 37, 1, 37, 1, 37, 3, 37, 554, 8, 37, 1, 38, 1, 38, 1, 38, 5, 38, 559, 8, 38, 10, 38, 12, 38, 562, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 5, 38, 570, 8, 38, 10, 38, 12, 38, 573, 9, 38, 1, 38, 1, 38, 1, 38, 1, 38, 1, 38, 3, 38, 580, 8, 38, 1, 38, 3, 38, 583, 8, 38, 3, 38, 585, 8, 38, 1, 39, 4, 39, 588, 8, 39, 11, 39, 12, 39, 589, 1, 40, 4, 40, 593, 8, 40, 11, 40, 12, 40, 594, 1, 40, 1, 40, 5, 40, 599, 8, 40, 10, 40, 12, 40, 602, 9, 40, 1, 40, 1, 40, 4, 40, 606, 8, 40, 11, 40, 12, 40, 607, 1, 40, 4, 40, 611, 8, 40, 11, 40, 12, 40, 612, 1, 40, 1, 40, 5, 40, 617, 8, 40, 10, 40, 12, 40, 620, 9, 40, 3, 40, 622, 8, 40, 1, 40, 1, 40, 1, 40, 1, 40, 4, 40, 628, 8, 40, 11, 40, 12, 40, 629, 1, 40, 1, 40, 3, 40, 634, 8, 40, 1, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 1, 46, 1, 47, 1, 47, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 48, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 49, 1, 50, 1, 50, 1, 50, 1, 50, 1, 50, 1, 51, 1, 51, 1, 52, 1, 52, 1, 52, 1, 53, 1, 53, 1, 53, 1, 54, 1, 54, 1, 54, 1, 54, 1, 54, 1, 55, 1, 55, 1, 55, 1, 55, 1, 56, 1, 56, 1, 56, 1, 56, 1, 56, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 57, 1, 58, 1, 58, 1, 58, 1, 59, 1, 59, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 60, 1, 61, 1, 61, 1, 62, 1, 62, 1, 62, 1, 62, 1, 62, 1, 63, 1, 63, 1, 63, 1, 64, 1, 64, 1, 64, 1, 65, 1, 65, 1, 66, 1, 66, 1, 66, 1, 67, 1, 67, 1, 68, 1, 68, 1, 68, 1, 69, 1, 69, 1, 70, 1, 70, 1, 71, 1, 71, 1, 72, 1, 72, 1, 73, 1, 73, 1, 74, 1, 74, 1, 74, 1, 74, 1, 74, 1, 75, 1, 75, 1, 75, 1, 75, 1, 75, 1, 76, 1, 76, 5, 76, 759, 8, 76, 10, 76, 12, 76, 762, 9, 76, 1, 76, 1, 76, 3, 76, 766, 8, 76, 1, 76, 4, 76, 769, 8, 76, 11, 76, 12, 76, 770, 3, 76, 773, 8, 76, 1, 77, 1, 77, 4, 77, 777, 8, 77, 11, 77, 12, 77, 778, 1, 77, 1, 77, 1, 78, 1, 78, 1, 78, 1, 78, 1, 79, 1, 79, 1, 79, 1, 79, 1, 80, 1, 80, 1, 80, 1, 80, 1, 81, 1, 81, 1, 81, 1, 81, 1, 81, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 82, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 83, 1, 84, 1, 84, 1, 84, 1, 84, 1, 85, 1, 85, 1, 85, 1, 85, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 86, 1, 87, 1, 87, 1, 87, 3, 87, 832, 8, 87, 1, 88, 4, 88, 835, 8, 88, 11, 88, 12, 88, 836, 1, 89, 1, 89, 1, 89, 
1, 89, 1, 90, 1, 90, 1, 90, 1, 90, 1, 91, 1, 91, 1, 91, 1, 91, 1, 92, 1, 92, 1, 92, 1, 92, 1, 93, 1, 93, 1, 93, 1, 93, 1, 93, 1, 94, 1, 94, 1, 94, 1, 94, 1, 95, 1, 95, 1, 95, 1, 95, 1, 96, 1, 96, 1, 96, 1, 96, 3, 96, 872, 8, 96, 1, 97, 1, 97, 3, 97, 876, 8, 97, 1, 97, 5, 97, 879, 8, 97, 10, 97, 12, 97, 882, 9, 97, 1, 97, 1, 97, 3, 97, 886, 8, 97, 1, 97, 4, 97, 889, 8, 97, 11, 97, 12, 97, 890, 3, 97, 893, 8, 97, 1, 98, 1, 98, 1, 98, 1, 98, 1, 99, 1, 99, 1, 99, 1, 99, 1, 100, 1, 100, 1, 100, 1, 100, 1, 101, 1, 101, 1, 101, 1, 101, 1, 102, 1, 102, 1, 102, 1, 102, 1, 102, 1, 103, 1, 103, 1, 103, 1, 103, 1, 104, 1, 104, 1, 104, 1, 104, 1, 105, 1, 105, 1, 105, 1, 105, 1, 106, 1, 106, 1, 106, 1, 107, 1, 107, 1, 107, 1, 107, 1, 108, 1, 108, 1, 108, 1, 108, 1, 109, 1, 109, 1, 109, 1, 109, 1, 110, 1, 110, 1, 110, 1, 110, 1, 111, 1, 111, 1, 111, 1, 111, 1, 112, 1, 112, 1, 112, 1, 112, 1, 112, 1, 113, 1, 113, 1, 113, 1, 113, 1, 113, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 114, 1, 115, 1, 115, 1, 115, 1, 115, 1, 116, 1, 116, 1, 116, 1, 116, 1, 117, 1, 117, 1, 117, 1, 117, 1, 118, 1, 118, 1, 118, 1, 118, 1, 119, 1, 119, 1, 119, 1, 119, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 120, 1, 121, 1, 121, 1, 121, 1, 121, 1, 122, 1, 122, 1, 122, 1, 122, 1, 123, 1, 123, 1, 123, 1, 123, 1, 124, 1, 124, 1, 124, 1, 124, 1, 125, 1, 125, 1, 125, 1, 125, 1, 126, 1, 126, 1, 126, 1, 126, 1, 127, 1, 127, 1, 127, 1, 127, 1, 128, 1, 128, 1, 128, 1, 128, 1, 129, 1, 129, 1, 129, 1, 129, 1, 130, 1, 130, 1, 130, 1, 130, 1, 130, 1, 131, 1, 131, 1, 131, 1, 131, 1, 132, 1, 132, 1, 132, 1, 132, 1, 133, 1, 133, 1, 133, 1, 133, 1, 134, 1, 134, 1, 134, 1, 134, 1, 135, 1, 135, 1, 135, 1, 135, 1, 136, 1, 136, 1, 136, 1, 136, 1, 137, 1, 137, 1, 137, 1, 137, 1, 137, 1, 138, 1, 138, 1, 138, 1, 138, 1, 138, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 139, 1, 140, 1, 140, 1, 140, 1, 140, 1, 141, 1, 141, 1, 141, 1, 141, 1, 142, 1, 142, 1, 142, 1, 142, 2, 480, 571, 0, 143, 10, 1, 12, 2, 14, 3, 16, 4, 18, 5, 20, 6, 22, 7, 24, 8, 26, 9, 28, 10, 30, 11, 32, 12, 34, 13, 36, 14, 38, 15, 40, 16, 42, 17, 44, 18, 46, 19, 48, 20, 50, 21, 52, 22, 54, 0, 56, 0, 58, 23, 60, 24, 62, 25, 64, 26, 66, 0, 68, 0, 70, 0, 72, 0, 74, 0, 76, 0, 78, 0, 80, 0, 82, 0, 84, 0, 86, 27, 88, 28, 90, 29, 92, 30, 94, 31, 96, 32, 98, 33, 100, 34, 102, 35, 104, 36, 106, 37, 108, 38, 110, 39, 112, 40, 114, 41, 116, 42, 118, 43, 120, 44, 122, 45, 124, 46, 126, 47, 128, 48, 130, 49, 132, 50, 134, 51, 136, 52, 138, 53, 140, 54, 142, 55, 144, 56, 146, 57, 148, 58, 150, 59, 152, 60, 154, 61, 156, 62, 158, 63, 160, 64, 162, 65, 164, 66, 166, 67, 168, 68, 170, 69, 172, 0, 174, 0, 176, 0, 178, 0, 180, 0, 182, 70, 184, 0, 186, 71, 188, 0, 190, 72, 192, 73, 194, 74, 196, 0, 198, 0, 200, 0, 202, 0, 204, 75, 206, 0, 208, 76, 210, 77, 212, 78, 214, 0, 216, 0, 218, 0, 220, 0, 222, 79, 224, 0, 226, 0, 228, 80, 230, 81, 232, 82, 234, 0, 236, 83, 238, 84, 240, 0, 242, 0, 244, 85, 246, 86, 248, 87, 250, 0, 252, 0, 254, 0, 256, 0, 258, 0, 260, 0, 262, 0, 264, 88, 266, 89, 268, 90, 270, 0, 272, 0, 274, 0, 276, 0, 278, 91, 280, 92, 282, 93, 284, 0, 286, 94, 288, 95, 290, 96, 292, 97, 294, 98, 10, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 12, 6, 0, 9, 10, 13, 13, 32, 32, 47, 47, 91, 91, 93, 93, 2, 0, 10, 10, 13, 13, 3, 0, 9, 10, 13, 13, 32, 32, 1, 0, 48, 57, 2, 0, 65, 90, 97, 122, 5, 0, 34, 34, 92, 92, 110, 110, 114, 114, 116, 116, 4, 0, 10, 10, 13, 13, 34, 34, 92, 92, 2, 0, 69, 69, 101, 101, 2, 0, 43, 43, 45, 45, 1, 0, 96, 96, 10, 0, 9, 10, 13, 13, 32, 32, 44, 44, 47, 
47, 61, 61, 91, 91, 93, 93, 96, 96, 124, 124, 2, 0, 42, 42, 47, 47, 1112, 0, 10, 1, 0, 0, 0, 0, 12, 1, 0, 0, 0, 0, 14, 1, 0, 0, 0, 0, 16, 1, 0, 0, 0, 0, 18, 1, 0, 0, 0, 0, 20, 1, 0, 0, 0, 0, 22, 1, 0, 0, 0, 0, 24, 1, 0, 0, 0, 0, 26, 1, 0, 0, 0, 0, 28, 1, 0, 0, 0, 0, 30, 1, 0, 0, 0, 0, 32, 1, 0, 0, 0, 0, 34, 1, 0, 0, 0, 0, 36, 1, 0, 0, 0, 0, 38, 1, 0, 0, 0, 0, 40, 1, 0, 0, 0, 0, 42, 1, 0, 0, 0, 0, 44, 1, 0, 0, 0, 0, 46, 1, 0, 0, 0, 0, 48, 1, 0, 0, 0, 0, 50, 1, 0, 0, 0, 0, 52, 1, 0, 0, 0, 1, 54, 1, 0, 0, 0, 1, 56, 1, 0, 0, 0, 1, 58, 1, 0, 0, 0, 1, 60, 1, 0, 0, 0, 1, 62, 1, 0, 0, 0, 2, 64, 1, 0, 0, 0, 2, 86, 1, 0, 0, 0, 2, 88, 1, 0, 0, 0, 2, 90, 1, 0, 0, 0, 2, 92, 1, 0, 0, 0, 2, 94, 1, 0, 0, 0, 2, 96, 1, 0, 0, 0, 2, 98, 1, 0, 0, 0, 2, 100, 1, 0, 0, 0, 2, 102, 1, 0, 0, 0, 2, 104, 1, 0, 0, 0, 2, 106, 1, 0, 0, 0, 2, 108, 1, 0, 0, 0, 2, 110, 1, 0, 0, 0, 2, 112, 1, 0, 0, 0, 2, 114, 1, 0, 0, 0, 2, 116, 1, 0, 0, 0, 2, 118, 1, 0, 0, 0, 2, 120, 1, 0, 0, 0, 2, 122, 1, 0, 0, 0, 2, 124, 1, 0, 0, 0, 2, 126, 1, 0, 0, 0, 2, 128, 1, 0, 0, 0, 2, 130, 1, 0, 0, 0, 2, 132, 1, 0, 0, 0, 2, 134, 1, 0, 0, 0, 2, 136, 1, 0, 0, 0, 2, 138, 1, 0, 0, 0, 2, 140, 1, 0, 0, 0, 2, 142, 1, 0, 0, 0, 2, 144, 1, 0, 0, 0, 2, 146, 1, 0, 0, 0, 2, 148, 1, 0, 0, 0, 2, 150, 1, 0, 0, 0, 2, 152, 1, 0, 0, 0, 2, 154, 1, 0, 0, 0, 2, 156, 1, 0, 0, 0, 2, 158, 1, 0, 0, 0, 2, 160, 1, 0, 0, 0, 2, 162, 1, 0, 0, 0, 2, 164, 1, 0, 0, 0, 2, 166, 1, 0, 0, 0, 2, 168, 1, 0, 0, 0, 2, 170, 1, 0, 0, 0, 3, 172, 1, 0, 0, 0, 3, 174, 1, 0, 0, 0, 3, 176, 1, 0, 0, 0, 3, 178, 1, 0, 0, 0, 3, 180, 1, 0, 0, 0, 3, 182, 1, 0, 0, 0, 3, 186, 1, 0, 0, 0, 3, 188, 1, 0, 0, 0, 3, 190, 1, 0, 0, 0, 3, 192, 1, 0, 0, 0, 3, 194, 1, 0, 0, 0, 4, 196, 1, 0, 0, 0, 4, 198, 1, 0, 0, 0, 4, 200, 1, 0, 0, 0, 4, 204, 1, 0, 0, 0, 4, 206, 1, 0, 0, 0, 4, 208, 1, 0, 0, 0, 4, 210, 1, 0, 0, 0, 4, 212, 1, 0, 0, 0, 5, 214, 1, 0, 0, 0, 5, 216, 1, 0, 0, 0, 5, 218, 1, 0, 0, 0, 5, 220, 1, 0, 0, 0, 5, 222, 1, 0, 0, 0, 5, 224, 1, 0, 0, 0, 5, 226, 1, 0, 0, 0, 5, 228, 1, 0, 0, 0, 5, 230, 1, 0, 0, 0, 5, 232, 1, 0, 0, 0, 6, 234, 1, 0, 0, 0, 6, 236, 1, 0, 0, 0, 6, 238, 1, 0, 0, 0, 6, 240, 1, 0, 0, 0, 6, 242, 1, 0, 0, 0, 6, 244, 1, 0, 0, 0, 6, 246, 1, 0, 0, 0, 6, 248, 1, 0, 0, 0, 7, 250, 1, 0, 0, 0, 7, 252, 1, 0, 0, 0, 7, 254, 1, 0, 0, 0, 7, 256, 1, 0, 0, 0, 7, 258, 1, 0, 0, 0, 7, 260, 1, 0, 0, 0, 7, 262, 1, 0, 0, 0, 7, 264, 1, 0, 0, 0, 7, 266, 1, 0, 0, 0, 7, 268, 1, 0, 0, 0, 8, 270, 1, 0, 0, 0, 8, 272, 1, 0, 0, 0, 8, 274, 1, 0, 0, 0, 8, 276, 1, 0, 0, 0, 8, 278, 1, 0, 0, 0, 8, 280, 1, 0, 0, 0, 8, 282, 1, 0, 0, 0, 9, 284, 1, 0, 0, 0, 9, 286, 1, 0, 0, 0, 9, 288, 1, 0, 0, 0, 9, 290, 1, 0, 0, 0, 9, 292, 1, 0, 0, 0, 9, 294, 1, 0, 0, 0, 10, 296, 1, 0, 0, 0, 12, 306, 1, 0, 0, 0, 14, 313, 1, 0, 0, 0, 16, 322, 1, 0, 0, 0, 18, 329, 1, 0, 0, 0, 20, 339, 1, 0, 0, 0, 22, 346, 1, 0, 0, 0, 24, 353, 1, 0, 0, 0, 26, 367, 1, 0, 0, 0, 28, 374, 1, 0, 0, 0, 30, 382, 1, 0, 0, 0, 32, 394, 1, 0, 0, 0, 34, 404, 1, 0, 0, 0, 36, 413, 1, 0, 0, 0, 38, 419, 1, 0, 0, 0, 40, 426, 1, 0, 0, 0, 42, 433, 1, 0, 0, 0, 44, 441, 1, 0, 0, 0, 46, 450, 1, 0, 0, 0, 48, 456, 1, 0, 0, 0, 50, 473, 1, 0, 0, 0, 52, 489, 1, 0, 0, 0, 54, 495, 1, 0, 0, 0, 56, 500, 1, 0, 0, 0, 58, 505, 1, 0, 0, 0, 60, 509, 1, 0, 0, 0, 62, 513, 1, 0, 0, 0, 64, 517, 1, 0, 0, 0, 66, 521, 1, 0, 0, 0, 68, 523, 1, 0, 0, 0, 70, 525, 1, 0, 0, 0, 72, 528, 1, 0, 0, 0, 74, 530, 1, 0, 0, 0, 76, 539, 1, 0, 0, 0, 78, 541, 1, 0, 0, 0, 80, 546, 1, 0, 0, 0, 82, 548, 1, 0, 0, 0, 84, 553, 1, 0, 0, 0, 86, 584, 1, 0, 0, 0, 88, 587, 1, 0, 0, 0, 90, 633, 1, 0, 0, 0, 92, 635, 1, 0, 0, 0, 94, 638, 1, 
0, 0, 0, 96, 642, 1, 0, 0, 0, 98, 646, 1, 0, 0, 0, 100, 648, 1, 0, 0, 0, 102, 650, 1, 0, 0, 0, 104, 655, 1, 0, 0, 0, 106, 657, 1, 0, 0, 0, 108, 663, 1, 0, 0, 0, 110, 669, 1, 0, 0, 0, 112, 674, 1, 0, 0, 0, 114, 676, 1, 0, 0, 0, 116, 679, 1, 0, 0, 0, 118, 682, 1, 0, 0, 0, 120, 687, 1, 0, 0, 0, 122, 691, 1, 0, 0, 0, 124, 696, 1, 0, 0, 0, 126, 702, 1, 0, 0, 0, 128, 705, 1, 0, 0, 0, 130, 707, 1, 0, 0, 0, 132, 713, 1, 0, 0, 0, 134, 715, 1, 0, 0, 0, 136, 720, 1, 0, 0, 0, 138, 723, 1, 0, 0, 0, 140, 726, 1, 0, 0, 0, 142, 728, 1, 0, 0, 0, 144, 731, 1, 0, 0, 0, 146, 733, 1, 0, 0, 0, 148, 736, 1, 0, 0, 0, 150, 738, 1, 0, 0, 0, 152, 740, 1, 0, 0, 0, 154, 742, 1, 0, 0, 0, 156, 744, 1, 0, 0, 0, 158, 746, 1, 0, 0, 0, 160, 751, 1, 0, 0, 0, 162, 772, 1, 0, 0, 0, 164, 774, 1, 0, 0, 0, 166, 782, 1, 0, 0, 0, 168, 786, 1, 0, 0, 0, 170, 790, 1, 0, 0, 0, 172, 794, 1, 0, 0, 0, 174, 799, 1, 0, 0, 0, 176, 805, 1, 0, 0, 0, 178, 811, 1, 0, 0, 0, 180, 815, 1, 0, 0, 0, 182, 819, 1, 0, 0, 0, 184, 831, 1, 0, 0, 0, 186, 834, 1, 0, 0, 0, 188, 838, 1, 0, 0, 0, 190, 842, 1, 0, 0, 0, 192, 846, 1, 0, 0, 0, 194, 850, 1, 0, 0, 0, 196, 854, 1, 0, 0, 0, 198, 859, 1, 0, 0, 0, 200, 863, 1, 0, 0, 0, 202, 871, 1, 0, 0, 0, 204, 892, 1, 0, 0, 0, 206, 894, 1, 0, 0, 0, 208, 898, 1, 0, 0, 0, 210, 902, 1, 0, 0, 0, 212, 906, 1, 0, 0, 0, 214, 910, 1, 0, 0, 0, 216, 915, 1, 0, 0, 0, 218, 919, 1, 0, 0, 0, 220, 923, 1, 0, 0, 0, 222, 927, 1, 0, 0, 0, 224, 930, 1, 0, 0, 0, 226, 934, 1, 0, 0, 0, 228, 938, 1, 0, 0, 0, 230, 942, 1, 0, 0, 0, 232, 946, 1, 0, 0, 0, 234, 950, 1, 0, 0, 0, 236, 955, 1, 0, 0, 0, 238, 960, 1, 0, 0, 0, 240, 967, 1, 0, 0, 0, 242, 971, 1, 0, 0, 0, 244, 975, 1, 0, 0, 0, 246, 979, 1, 0, 0, 0, 248, 983, 1, 0, 0, 0, 250, 987, 1, 0, 0, 0, 252, 993, 1, 0, 0, 0, 254, 997, 1, 0, 0, 0, 256, 1001, 1, 0, 0, 0, 258, 1005, 1, 0, 0, 0, 260, 1009, 1, 0, 0, 0, 262, 1013, 1, 0, 0, 0, 264, 1017, 1, 0, 0, 0, 266, 1021, 1, 0, 0, 0, 268, 1025, 1, 0, 0, 0, 270, 1029, 1, 0, 0, 0, 272, 1034, 1, 0, 0, 0, 274, 1038, 1, 0, 0, 0, 276, 1042, 1, 0, 0, 0, 278, 1046, 1, 0, 0, 0, 280, 1050, 1, 0, 0, 0, 282, 1054, 1, 0, 0, 0, 284, 1058, 1, 0, 0, 0, 286, 1063, 1, 0, 0, 0, 288, 1068, 1, 0, 0, 0, 290, 1078, 1, 0, 0, 0, 292, 1082, 1, 0, 0, 0, 294, 1086, 1, 0, 0, 0, 296, 297, 5, 100, 0, 0, 297, 298, 5, 105, 0, 0, 298, 299, 5, 115, 0, 0, 299, 300, 5, 115, 0, 0, 300, 301, 5, 101, 0, 0, 301, 302, 5, 99, 0, 0, 302, 303, 5, 116, 0, 0, 303, 304, 1, 0, 0, 0, 304, 305, 6, 0, 0, 0, 305, 11, 1, 0, 0, 0, 306, 307, 5, 100, 0, 0, 307, 308, 5, 114, 0, 0, 308, 309, 5, 111, 0, 0, 309, 310, 5, 112, 0, 0, 310, 311, 1, 0, 0, 0, 311, 312, 6, 1, 1, 0, 312, 13, 1, 0, 0, 0, 313, 314, 5, 101, 0, 0, 314, 315, 5, 110, 0, 0, 315, 316, 5, 114, 0, 0, 316, 317, 5, 105, 0, 0, 317, 318, 5, 99, 0, 0, 318, 319, 5, 104, 0, 0, 319, 320, 1, 0, 0, 0, 320, 321, 6, 2, 2, 0, 321, 15, 1, 0, 0, 0, 322, 323, 5, 101, 0, 0, 323, 324, 5, 118, 0, 0, 324, 325, 5, 97, 0, 0, 325, 326, 5, 108, 0, 0, 326, 327, 1, 0, 0, 0, 327, 328, 6, 3, 0, 0, 328, 17, 1, 0, 0, 0, 329, 330, 5, 101, 0, 0, 330, 331, 5, 120, 0, 0, 331, 332, 5, 112, 0, 0, 332, 333, 5, 108, 0, 0, 333, 334, 5, 97, 0, 0, 334, 335, 5, 105, 0, 0, 335, 336, 5, 110, 0, 0, 336, 337, 1, 0, 0, 0, 337, 338, 6, 4, 3, 0, 338, 19, 1, 0, 0, 0, 339, 340, 5, 102, 0, 0, 340, 341, 5, 114, 0, 0, 341, 342, 5, 111, 0, 0, 342, 343, 5, 109, 0, 0, 343, 344, 1, 0, 0, 0, 344, 345, 6, 5, 4, 0, 345, 21, 1, 0, 0, 0, 346, 347, 5, 103, 0, 0, 347, 348, 5, 114, 0, 0, 348, 349, 5, 111, 0, 0, 349, 350, 5, 107, 0, 0, 350, 351, 1, 0, 0, 0, 351, 352, 6, 6, 0, 0, 352, 23, 1, 0, 0, 0, 353, 354, 5, 
105, 0, 0, 354, 355, 5, 110, 0, 0, 355, 356, 5, 108, 0, 0, 356, 357, 5, 105, 0, 0, 357, 358, 5, 110, 0, 0, 358, 359, 5, 101, 0, 0, 359, 360, 5, 115, 0, 0, 360, 361, 5, 116, 0, 0, 361, 362, 5, 97, 0, 0, 362, 363, 5, 116, 0, 0, 363, 364, 5, 115, 0, 0, 364, 365, 1, 0, 0, 0, 365, 366, 6, 7, 0, 0, 366, 25, 1, 0, 0, 0, 367, 368, 5, 107, 0, 0, 368, 369, 5, 101, 0, 0, 369, 370, 5, 101, 0, 0, 370, 371, 5, 112, 0, 0, 371, 372, 1, 0, 0, 0, 372, 373, 6, 8, 1, 0, 373, 27, 1, 0, 0, 0, 374, 375, 5, 108, 0, 0, 375, 376, 5, 105, 0, 0, 376, 377, 5, 109, 0, 0, 377, 378, 5, 105, 0, 0, 378, 379, 5, 116, 0, 0, 379, 380, 1, 0, 0, 0, 380, 381, 6, 9, 0, 0, 381, 29, 1, 0, 0, 0, 382, 383, 5, 109, 0, 0, 383, 384, 5, 118, 0, 0, 384, 385, 5, 95, 0, 0, 385, 386, 5, 101, 0, 0, 386, 387, 5, 120, 0, 0, 387, 388, 5, 112, 0, 0, 388, 389, 5, 97, 0, 0, 389, 390, 5, 110, 0, 0, 390, 391, 5, 100, 0, 0, 391, 392, 1, 0, 0, 0, 392, 393, 6, 10, 5, 0, 393, 31, 1, 0, 0, 0, 394, 395, 5, 112, 0, 0, 395, 396, 5, 114, 0, 0, 396, 397, 5, 111, 0, 0, 397, 398, 5, 106, 0, 0, 398, 399, 5, 101, 0, 0, 399, 400, 5, 99, 0, 0, 400, 401, 5, 116, 0, 0, 401, 402, 1, 0, 0, 0, 402, 403, 6, 11, 1, 0, 403, 33, 1, 0, 0, 0, 404, 405, 5, 114, 0, 0, 405, 406, 5, 101, 0, 0, 406, 407, 5, 110, 0, 0, 407, 408, 5, 97, 0, 0, 408, 409, 5, 109, 0, 0, 409, 410, 5, 101, 0, 0, 410, 411, 1, 0, 0, 0, 411, 412, 6, 12, 6, 0, 412, 35, 1, 0, 0, 0, 413, 414, 5, 114, 0, 0, 414, 415, 5, 111, 0, 0, 415, 416, 5, 119, 0, 0, 416, 417, 1, 0, 0, 0, 417, 418, 6, 13, 0, 0, 418, 37, 1, 0, 0, 0, 419, 420, 5, 115, 0, 0, 420, 421, 5, 104, 0, 0, 421, 422, 5, 111, 0, 0, 422, 423, 5, 119, 0, 0, 423, 424, 1, 0, 0, 0, 424, 425, 6, 14, 7, 0, 425, 39, 1, 0, 0, 0, 426, 427, 5, 115, 0, 0, 427, 428, 5, 111, 0, 0, 428, 429, 5, 114, 0, 0, 429, 430, 5, 116, 0, 0, 430, 431, 1, 0, 0, 0, 431, 432, 6, 15, 0, 0, 432, 41, 1, 0, 0, 0, 433, 434, 5, 115, 0, 0, 434, 435, 5, 116, 0, 0, 435, 436, 5, 97, 0, 0, 436, 437, 5, 116, 0, 0, 437, 438, 5, 115, 0, 0, 438, 439, 1, 0, 0, 0, 439, 440, 6, 16, 0, 0, 440, 43, 1, 0, 0, 0, 441, 442, 5, 119, 0, 0, 442, 443, 5, 104, 0, 0, 443, 444, 5, 101, 0, 0, 444, 445, 5, 114, 0, 0, 445, 446, 5, 101, 0, 0, 446, 447, 1, 0, 0, 0, 447, 448, 6, 17, 0, 0, 448, 45, 1, 0, 0, 0, 449, 451, 8, 0, 0, 0, 450, 449, 1, 0, 0, 0, 451, 452, 1, 0, 0, 0, 452, 450, 1, 0, 0, 0, 452, 453, 1, 0, 0, 0, 453, 454, 1, 0, 0, 0, 454, 455, 6, 18, 0, 0, 455, 47, 1, 0, 0, 0, 456, 457, 5, 47, 0, 0, 457, 458, 5, 47, 0, 0, 458, 462, 1, 0, 0, 0, 459, 461, 8, 1, 0, 0, 460, 459, 1, 0, 0, 0, 461, 464, 1, 0, 0, 0, 462, 460, 1, 0, 0, 0, 462, 463, 1, 0, 0, 0, 463, 466, 1, 0, 0, 0, 464, 462, 1, 0, 0, 0, 465, 467, 5, 13, 0, 0, 466, 465, 1, 0, 0, 0, 466, 467, 1, 0, 0, 0, 467, 469, 1, 0, 0, 0, 468, 470, 5, 10, 0, 0, 469, 468, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 1, 0, 0, 0, 471, 472, 6, 19, 8, 0, 472, 49, 1, 0, 0, 0, 473, 474, 5, 47, 0, 0, 474, 475, 5, 42, 0, 0, 475, 480, 1, 0, 0, 0, 476, 479, 3, 50, 20, 0, 477, 479, 9, 0, 0, 0, 478, 476, 1, 0, 0, 0, 478, 477, 1, 0, 0, 0, 479, 482, 1, 0, 0, 0, 480, 481, 1, 0, 0, 0, 480, 478, 1, 0, 0, 0, 481, 483, 1, 0, 0, 0, 482, 480, 1, 0, 0, 0, 483, 484, 5, 42, 0, 0, 484, 485, 5, 47, 0, 0, 485, 486, 1, 0, 0, 0, 486, 487, 6, 20, 8, 0, 487, 51, 1, 0, 0, 0, 488, 490, 7, 2, 0, 0, 489, 488, 1, 0, 0, 0, 490, 491, 1, 0, 0, 0, 491, 489, 1, 0, 0, 0, 491, 492, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 494, 6, 21, 8, 0, 494, 53, 1, 0, 0, 0, 495, 496, 3, 158, 74, 0, 496, 497, 1, 0, 0, 0, 497, 498, 6, 22, 9, 0, 498, 499, 6, 22, 10, 0, 499, 55, 1, 0, 0, 0, 500, 501, 3, 64, 27, 0, 501, 502, 1, 0, 0, 0, 
502, 503, 6, 23, 11, 0, 503, 504, 6, 23, 12, 0, 504, 57, 1, 0, 0, 0, 505, 506, 3, 52, 21, 0, 506, 507, 1, 0, 0, 0, 507, 508, 6, 24, 8, 0, 508, 59, 1, 0, 0, 0, 509, 510, 3, 48, 19, 0, 510, 511, 1, 0, 0, 0, 511, 512, 6, 25, 8, 0, 512, 61, 1, 0, 0, 0, 513, 514, 3, 50, 20, 0, 514, 515, 1, 0, 0, 0, 515, 516, 6, 26, 8, 0, 516, 63, 1, 0, 0, 0, 517, 518, 5, 124, 0, 0, 518, 519, 1, 0, 0, 0, 519, 520, 6, 27, 12, 0, 520, 65, 1, 0, 0, 0, 521, 522, 7, 3, 0, 0, 522, 67, 1, 0, 0, 0, 523, 524, 7, 4, 0, 0, 524, 69, 1, 0, 0, 0, 525, 526, 5, 92, 0, 0, 526, 527, 7, 5, 0, 0, 527, 71, 1, 0, 0, 0, 528, 529, 8, 6, 0, 0, 529, 73, 1, 0, 0, 0, 530, 532, 7, 7, 0, 0, 531, 533, 7, 8, 0, 0, 532, 531, 1, 0, 0, 0, 532, 533, 1, 0, 0, 0, 533, 535, 1, 0, 0, 0, 534, 536, 3, 66, 28, 0, 535, 534, 1, 0, 0, 0, 536, 537, 1, 0, 0, 0, 537, 535, 1, 0, 0, 0, 537, 538, 1, 0, 0, 0, 538, 75, 1, 0, 0, 0, 539, 540, 5, 64, 0, 0, 540, 77, 1, 0, 0, 0, 541, 542, 5, 96, 0, 0, 542, 79, 1, 0, 0, 0, 543, 547, 8, 9, 0, 0, 544, 545, 5, 96, 0, 0, 545, 547, 5, 96, 0, 0, 546, 543, 1, 0, 0, 0, 546, 544, 1, 0, 0, 0, 547, 81, 1, 0, 0, 0, 548, 549, 5, 95, 0, 0, 549, 83, 1, 0, 0, 0, 550, 554, 3, 68, 29, 0, 551, 554, 3, 66, 28, 0, 552, 554, 3, 82, 36, 0, 553, 550, 1, 0, 0, 0, 553, 551, 1, 0, 0, 0, 553, 552, 1, 0, 0, 0, 554, 85, 1, 0, 0, 0, 555, 560, 5, 34, 0, 0, 556, 559, 3, 70, 30, 0, 557, 559, 3, 72, 31, 0, 558, 556, 1, 0, 0, 0, 558, 557, 1, 0, 0, 0, 559, 562, 1, 0, 0, 0, 560, 558, 1, 0, 0, 0, 560, 561, 1, 0, 0, 0, 561, 563, 1, 0, 0, 0, 562, 560, 1, 0, 0, 0, 563, 585, 5, 34, 0, 0, 564, 565, 5, 34, 0, 0, 565, 566, 5, 34, 0, 0, 566, 567, 5, 34, 0, 0, 567, 571, 1, 0, 0, 0, 568, 570, 8, 1, 0, 0, 569, 568, 1, 0, 0, 0, 570, 573, 1, 0, 0, 0, 571, 572, 1, 0, 0, 0, 571, 569, 1, 0, 0, 0, 572, 574, 1, 0, 0, 0, 573, 571, 1, 0, 0, 0, 574, 575, 5, 34, 0, 0, 575, 576, 5, 34, 0, 0, 576, 577, 5, 34, 0, 0, 577, 579, 1, 0, 0, 0, 578, 580, 5, 34, 0, 0, 579, 578, 1, 0, 0, 0, 579, 580, 1, 0, 0, 0, 580, 582, 1, 0, 0, 0, 581, 583, 5, 34, 0, 0, 582, 581, 1, 0, 0, 0, 582, 583, 1, 0, 0, 0, 583, 585, 1, 0, 0, 0, 584, 555, 1, 0, 0, 0, 584, 564, 1, 0, 0, 0, 585, 87, 1, 0, 0, 0, 586, 588, 3, 66, 28, 0, 587, 586, 1, 0, 0, 0, 588, 589, 1, 0, 0, 0, 589, 587, 1, 0, 0, 0, 589, 590, 1, 0, 0, 0, 590, 89, 1, 0, 0, 0, 591, 593, 3, 66, 28, 0, 592, 591, 1, 0, 0, 0, 593, 594, 1, 0, 0, 0, 594, 592, 1, 0, 0, 0, 594, 595, 1, 0, 0, 0, 595, 596, 1, 0, 0, 0, 596, 600, 3, 104, 47, 0, 597, 599, 3, 66, 28, 0, 598, 597, 1, 0, 0, 0, 599, 602, 1, 0, 0, 0, 600, 598, 1, 0, 0, 0, 600, 601, 1, 0, 0, 0, 601, 634, 1, 0, 0, 0, 602, 600, 1, 0, 0, 0, 603, 605, 3, 104, 47, 0, 604, 606, 3, 66, 28, 0, 605, 604, 1, 0, 0, 0, 606, 607, 1, 0, 0, 0, 607, 605, 1, 0, 0, 0, 607, 608, 1, 0, 0, 0, 608, 634, 1, 0, 0, 0, 609, 611, 3, 66, 28, 0, 610, 609, 1, 0, 0, 0, 611, 612, 1, 0, 0, 0, 612, 610, 1, 0, 0, 0, 612, 613, 1, 0, 0, 0, 613, 621, 1, 0, 0, 0, 614, 618, 3, 104, 47, 0, 615, 617, 3, 66, 28, 0, 616, 615, 1, 0, 0, 0, 617, 620, 1, 0, 0, 0, 618, 616, 1, 0, 0, 0, 618, 619, 1, 0, 0, 0, 619, 622, 1, 0, 0, 0, 620, 618, 1, 0, 0, 0, 621, 614, 1, 0, 0, 0, 621, 622, 1, 0, 0, 0, 622, 623, 1, 0, 0, 0, 623, 624, 3, 74, 32, 0, 624, 634, 1, 0, 0, 0, 625, 627, 3, 104, 47, 0, 626, 628, 3, 66, 28, 0, 627, 626, 1, 0, 0, 0, 628, 629, 1, 0, 0, 0, 629, 627, 1, 0, 0, 0, 629, 630, 1, 0, 0, 0, 630, 631, 1, 0, 0, 0, 631, 632, 3, 74, 32, 0, 632, 634, 1, 0, 0, 0, 633, 592, 1, 0, 0, 0, 633, 603, 1, 0, 0, 0, 633, 610, 1, 0, 0, 0, 633, 625, 1, 0, 0, 0, 634, 91, 1, 0, 0, 0, 635, 636, 5, 98, 0, 0, 636, 637, 5, 121, 0, 0, 637, 93, 1, 0, 0, 0, 638, 639, 5, 97, 0, 
0, 639, 640, 5, 110, 0, 0, 640, 641, 5, 100, 0, 0, 641, 95, 1, 0, 0, 0, 642, 643, 5, 97, 0, 0, 643, 644, 5, 115, 0, 0, 644, 645, 5, 99, 0, 0, 645, 97, 1, 0, 0, 0, 646, 647, 5, 61, 0, 0, 647, 99, 1, 0, 0, 0, 648, 649, 5, 44, 0, 0, 649, 101, 1, 0, 0, 0, 650, 651, 5, 100, 0, 0, 651, 652, 5, 101, 0, 0, 652, 653, 5, 115, 0, 0, 653, 654, 5, 99, 0, 0, 654, 103, 1, 0, 0, 0, 655, 656, 5, 46, 0, 0, 656, 105, 1, 0, 0, 0, 657, 658, 5, 102, 0, 0, 658, 659, 5, 97, 0, 0, 659, 660, 5, 108, 0, 0, 660, 661, 5, 115, 0, 0, 661, 662, 5, 101, 0, 0, 662, 107, 1, 0, 0, 0, 663, 664, 5, 102, 0, 0, 664, 665, 5, 105, 0, 0, 665, 666, 5, 114, 0, 0, 666, 667, 5, 115, 0, 0, 667, 668, 5, 116, 0, 0, 668, 109, 1, 0, 0, 0, 669, 670, 5, 108, 0, 0, 670, 671, 5, 97, 0, 0, 671, 672, 5, 115, 0, 0, 672, 673, 5, 116, 0, 0, 673, 111, 1, 0, 0, 0, 674, 675, 5, 40, 0, 0, 675, 113, 1, 0, 0, 0, 676, 677, 5, 105, 0, 0, 677, 678, 5, 110, 0, 0, 678, 115, 1, 0, 0, 0, 679, 680, 5, 105, 0, 0, 680, 681, 5, 115, 0, 0, 681, 117, 1, 0, 0, 0, 682, 683, 5, 108, 0, 0, 683, 684, 5, 105, 0, 0, 684, 685, 5, 107, 0, 0, 685, 686, 5, 101, 0, 0, 686, 119, 1, 0, 0, 0, 687, 688, 5, 110, 0, 0, 688, 689, 5, 111, 0, 0, 689, 690, 5, 116, 0, 0, 690, 121, 1, 0, 0, 0, 691, 692, 5, 110, 0, 0, 692, 693, 5, 117, 0, 0, 693, 694, 5, 108, 0, 0, 694, 695, 5, 108, 0, 0, 695, 123, 1, 0, 0, 0, 696, 697, 5, 110, 0, 0, 697, 698, 5, 117, 0, 0, 698, 699, 5, 108, 0, 0, 699, 700, 5, 108, 0, 0, 700, 701, 5, 115, 0, 0, 701, 125, 1, 0, 0, 0, 702, 703, 5, 111, 0, 0, 703, 704, 5, 114, 0, 0, 704, 127, 1, 0, 0, 0, 705, 706, 5, 63, 0, 0, 706, 129, 1, 0, 0, 0, 707, 708, 5, 114, 0, 0, 708, 709, 5, 108, 0, 0, 709, 710, 5, 105, 0, 0, 710, 711, 5, 107, 0, 0, 711, 712, 5, 101, 0, 0, 712, 131, 1, 0, 0, 0, 713, 714, 5, 41, 0, 0, 714, 133, 1, 0, 0, 0, 715, 716, 5, 116, 0, 0, 716, 717, 5, 114, 0, 0, 717, 718, 5, 117, 0, 0, 718, 719, 5, 101, 0, 0, 719, 135, 1, 0, 0, 0, 720, 721, 5, 61, 0, 0, 721, 722, 5, 61, 0, 0, 722, 137, 1, 0, 0, 0, 723, 724, 5, 33, 0, 0, 724, 725, 5, 61, 0, 0, 725, 139, 1, 0, 0, 0, 726, 727, 5, 60, 0, 0, 727, 141, 1, 0, 0, 0, 728, 729, 5, 60, 0, 0, 729, 730, 5, 61, 0, 0, 730, 143, 1, 0, 0, 0, 731, 732, 5, 62, 0, 0, 732, 145, 1, 0, 0, 0, 733, 734, 5, 62, 0, 0, 734, 735, 5, 61, 0, 0, 735, 147, 1, 0, 0, 0, 736, 737, 5, 43, 0, 0, 737, 149, 1, 0, 0, 0, 738, 739, 5, 45, 0, 0, 739, 151, 1, 0, 0, 0, 740, 741, 5, 42, 0, 0, 741, 153, 1, 0, 0, 0, 742, 743, 5, 47, 0, 0, 743, 155, 1, 0, 0, 0, 744, 745, 5, 37, 0, 0, 745, 157, 1, 0, 0, 0, 746, 747, 5, 91, 0, 0, 747, 748, 1, 0, 0, 0, 748, 749, 6, 74, 0, 0, 749, 750, 6, 74, 0, 0, 750, 159, 1, 0, 0, 0, 751, 752, 5, 93, 0, 0, 752, 753, 1, 0, 0, 0, 753, 754, 6, 75, 12, 0, 754, 755, 6, 75, 12, 0, 755, 161, 1, 0, 0, 0, 756, 760, 3, 68, 29, 0, 757, 759, 3, 84, 37, 0, 758, 757, 1, 0, 0, 0, 759, 762, 1, 0, 0, 0, 760, 758, 1, 0, 0, 0, 760, 761, 1, 0, 0, 0, 761, 773, 1, 0, 0, 0, 762, 760, 1, 0, 0, 0, 763, 766, 3, 82, 36, 0, 764, 766, 3, 76, 33, 0, 765, 763, 1, 0, 0, 0, 765, 764, 1, 0, 0, 0, 766, 768, 1, 0, 0, 0, 767, 769, 3, 84, 37, 0, 768, 767, 1, 0, 0, 0, 769, 770, 1, 0, 0, 0, 770, 768, 1, 0, 0, 0, 770, 771, 1, 0, 0, 0, 771, 773, 1, 0, 0, 0, 772, 756, 1, 0, 0, 0, 772, 765, 1, 0, 0, 0, 773, 163, 1, 0, 0, 0, 774, 776, 3, 78, 34, 0, 775, 777, 3, 80, 35, 0, 776, 775, 1, 0, 0, 0, 777, 778, 1, 0, 0, 0, 778, 776, 1, 0, 0, 0, 778, 779, 1, 0, 0, 0, 779, 780, 1, 0, 0, 0, 780, 781, 3, 78, 34, 0, 781, 165, 1, 0, 0, 0, 782, 783, 3, 48, 19, 0, 783, 784, 1, 0, 0, 0, 784, 785, 6, 78, 8, 0, 785, 167, 1, 0, 0, 0, 786, 787, 3, 50, 20, 0, 787, 788, 1, 0, 0, 0, 788, 789, 6, 
79, 8, 0, 789, 169, 1, 0, 0, 0, 790, 791, 3, 52, 21, 0, 791, 792, 1, 0, 0, 0, 792, 793, 6, 80, 8, 0, 793, 171, 1, 0, 0, 0, 794, 795, 3, 64, 27, 0, 795, 796, 1, 0, 0, 0, 796, 797, 6, 81, 11, 0, 797, 798, 6, 81, 12, 0, 798, 173, 1, 0, 0, 0, 799, 800, 3, 158, 74, 0, 800, 801, 1, 0, 0, 0, 801, 802, 6, 82, 9, 0, 802, 803, 6, 82, 4, 0, 803, 804, 6, 82, 4, 0, 804, 175, 1, 0, 0, 0, 805, 806, 3, 160, 75, 0, 806, 807, 1, 0, 0, 0, 807, 808, 6, 83, 13, 0, 808, 809, 6, 83, 12, 0, 809, 810, 6, 83, 12, 0, 810, 177, 1, 0, 0, 0, 811, 812, 3, 100, 45, 0, 812, 813, 1, 0, 0, 0, 813, 814, 6, 84, 14, 0, 814, 179, 1, 0, 0, 0, 815, 816, 3, 98, 44, 0, 816, 817, 1, 0, 0, 0, 817, 818, 6, 85, 15, 0, 818, 181, 1, 0, 0, 0, 819, 820, 5, 109, 0, 0, 820, 821, 5, 101, 0, 0, 821, 822, 5, 116, 0, 0, 822, 823, 5, 97, 0, 0, 823, 824, 5, 100, 0, 0, 824, 825, 5, 97, 0, 0, 825, 826, 5, 116, 0, 0, 826, 827, 5, 97, 0, 0, 827, 183, 1, 0, 0, 0, 828, 832, 8, 10, 0, 0, 829, 830, 5, 47, 0, 0, 830, 832, 8, 11, 0, 0, 831, 828, 1, 0, 0, 0, 831, 829, 1, 0, 0, 0, 832, 185, 1, 0, 0, 0, 833, 835, 3, 184, 87, 0, 834, 833, 1, 0, 0, 0, 835, 836, 1, 0, 0, 0, 836, 834, 1, 0, 0, 0, 836, 837, 1, 0, 0, 0, 837, 187, 1, 0, 0, 0, 838, 839, 3, 164, 77, 0, 839, 840, 1, 0, 0, 0, 840, 841, 6, 89, 16, 0, 841, 189, 1, 0, 0, 0, 842, 843, 3, 48, 19, 0, 843, 844, 1, 0, 0, 0, 844, 845, 6, 90, 8, 0, 845, 191, 1, 0, 0, 0, 846, 847, 3, 50, 20, 0, 847, 848, 1, 0, 0, 0, 848, 849, 6, 91, 8, 0, 849, 193, 1, 0, 0, 0, 850, 851, 3, 52, 21, 0, 851, 852, 1, 0, 0, 0, 852, 853, 6, 92, 8, 0, 853, 195, 1, 0, 0, 0, 854, 855, 3, 64, 27, 0, 855, 856, 1, 0, 0, 0, 856, 857, 6, 93, 11, 0, 857, 858, 6, 93, 12, 0, 858, 197, 1, 0, 0, 0, 859, 860, 3, 104, 47, 0, 860, 861, 1, 0, 0, 0, 861, 862, 6, 94, 17, 0, 862, 199, 1, 0, 0, 0, 863, 864, 3, 100, 45, 0, 864, 865, 1, 0, 0, 0, 865, 866, 6, 95, 14, 0, 866, 201, 1, 0, 0, 0, 867, 872, 3, 68, 29, 0, 868, 872, 3, 66, 28, 0, 869, 872, 3, 82, 36, 0, 870, 872, 3, 152, 71, 0, 871, 867, 1, 0, 0, 0, 871, 868, 1, 0, 0, 0, 871, 869, 1, 0, 0, 0, 871, 870, 1, 0, 0, 0, 872, 203, 1, 0, 0, 0, 873, 876, 3, 68, 29, 0, 874, 876, 3, 152, 71, 0, 875, 873, 1, 0, 0, 0, 875, 874, 1, 0, 0, 0, 876, 880, 1, 0, 0, 0, 877, 879, 3, 202, 96, 0, 878, 877, 1, 0, 0, 0, 879, 882, 1, 0, 0, 0, 880, 878, 1, 0, 0, 0, 880, 881, 1, 0, 0, 0, 881, 893, 1, 0, 0, 0, 882, 880, 1, 0, 0, 0, 883, 886, 3, 82, 36, 0, 884, 886, 3, 76, 33, 0, 885, 883, 1, 0, 0, 0, 885, 884, 1, 0, 0, 0, 886, 888, 1, 0, 0, 0, 887, 889, 3, 202, 96, 0, 888, 887, 1, 0, 0, 0, 889, 890, 1, 0, 0, 0, 890, 888, 1, 0, 0, 0, 890, 891, 1, 0, 0, 0, 891, 893, 1, 0, 0, 0, 892, 875, 1, 0, 0, 0, 892, 885, 1, 0, 0, 0, 893, 205, 1, 0, 0, 0, 894, 895, 3, 164, 77, 0, 895, 896, 1, 0, 0, 0, 896, 897, 6, 98, 16, 0, 897, 207, 1, 0, 0, 0, 898, 899, 3, 48, 19, 0, 899, 900, 1, 0, 0, 0, 900, 901, 6, 99, 8, 0, 901, 209, 1, 0, 0, 0, 902, 903, 3, 50, 20, 0, 903, 904, 1, 0, 0, 0, 904, 905, 6, 100, 8, 0, 905, 211, 1, 0, 0, 0, 906, 907, 3, 52, 21, 0, 907, 908, 1, 0, 0, 0, 908, 909, 6, 101, 8, 0, 909, 213, 1, 0, 0, 0, 910, 911, 3, 64, 27, 0, 911, 912, 1, 0, 0, 0, 912, 913, 6, 102, 11, 0, 913, 914, 6, 102, 12, 0, 914, 215, 1, 0, 0, 0, 915, 916, 3, 98, 44, 0, 916, 917, 1, 0, 0, 0, 917, 918, 6, 103, 15, 0, 918, 217, 1, 0, 0, 0, 919, 920, 3, 100, 45, 0, 920, 921, 1, 0, 0, 0, 921, 922, 6, 104, 14, 0, 922, 219, 1, 0, 0, 0, 923, 924, 3, 104, 47, 0, 924, 925, 1, 0, 0, 0, 925, 926, 6, 105, 17, 0, 926, 221, 1, 0, 0, 0, 927, 928, 5, 97, 0, 0, 928, 929, 5, 115, 0, 0, 929, 223, 1, 0, 0, 0, 930, 931, 3, 164, 77, 0, 931, 932, 1, 0, 0, 0, 932, 933, 6, 107, 16, 0, 
933, 225, 1, 0, 0, 0, 934, 935, 3, 204, 97, 0, 935, 936, 1, 0, 0, 0, 936, 937, 6, 108, 18, 0, 937, 227, 1, 0, 0, 0, 938, 939, 3, 48, 19, 0, 939, 940, 1, 0, 0, 0, 940, 941, 6, 109, 8, 0, 941, 229, 1, 0, 0, 0, 942, 943, 3, 50, 20, 0, 943, 944, 1, 0, 0, 0, 944, 945, 6, 110, 8, 0, 945, 231, 1, 0, 0, 0, 946, 947, 3, 52, 21, 0, 947, 948, 1, 0, 0, 0, 948, 949, 6, 111, 8, 0, 949, 233, 1, 0, 0, 0, 950, 951, 3, 64, 27, 0, 951, 952, 1, 0, 0, 0, 952, 953, 6, 112, 11, 0, 953, 954, 6, 112, 12, 0, 954, 235, 1, 0, 0, 0, 955, 956, 5, 111, 0, 0, 956, 957, 5, 110, 0, 0, 957, 958, 1, 0, 0, 0, 958, 959, 6, 113, 19, 0, 959, 237, 1, 0, 0, 0, 960, 961, 5, 119, 0, 0, 961, 962, 5, 105, 0, 0, 962, 963, 5, 116, 0, 0, 963, 964, 5, 104, 0, 0, 964, 965, 1, 0, 0, 0, 965, 966, 6, 114, 19, 0, 966, 239, 1, 0, 0, 0, 967, 968, 3, 186, 88, 0, 968, 969, 1, 0, 0, 0, 969, 970, 6, 115, 20, 0, 970, 241, 1, 0, 0, 0, 971, 972, 3, 164, 77, 0, 972, 973, 1, 0, 0, 0, 973, 974, 6, 116, 16, 0, 974, 243, 1, 0, 0, 0, 975, 976, 3, 48, 19, 0, 976, 977, 1, 0, 0, 0, 977, 978, 6, 117, 8, 0, 978, 245, 1, 0, 0, 0, 979, 980, 3, 50, 20, 0, 980, 981, 1, 0, 0, 0, 981, 982, 6, 118, 8, 0, 982, 247, 1, 0, 0, 0, 983, 984, 3, 52, 21, 0, 984, 985, 1, 0, 0, 0, 985, 986, 6, 119, 8, 0, 986, 249, 1, 0, 0, 0, 987, 988, 3, 64, 27, 0, 988, 989, 1, 0, 0, 0, 989, 990, 6, 120, 11, 0, 990, 991, 6, 120, 12, 0, 991, 992, 6, 120, 12, 0, 992, 251, 1, 0, 0, 0, 993, 994, 3, 98, 44, 0, 994, 995, 1, 0, 0, 0, 995, 996, 6, 121, 15, 0, 996, 253, 1, 0, 0, 0, 997, 998, 3, 100, 45, 0, 998, 999, 1, 0, 0, 0, 999, 1000, 6, 122, 14, 0, 1000, 255, 1, 0, 0, 0, 1001, 1002, 3, 104, 47, 0, 1002, 1003, 1, 0, 0, 0, 1003, 1004, 6, 123, 17, 0, 1004, 257, 1, 0, 0, 0, 1005, 1006, 3, 238, 114, 0, 1006, 1007, 1, 0, 0, 0, 1007, 1008, 6, 124, 21, 0, 1008, 259, 1, 0, 0, 0, 1009, 1010, 3, 204, 97, 0, 1010, 1011, 1, 0, 0, 0, 1011, 1012, 6, 125, 18, 0, 1012, 261, 1, 0, 0, 0, 1013, 1014, 3, 164, 77, 0, 1014, 1015, 1, 0, 0, 0, 1015, 1016, 6, 126, 16, 0, 1016, 263, 1, 0, 0, 0, 1017, 1018, 3, 48, 19, 0, 1018, 1019, 1, 0, 0, 0, 1019, 1020, 6, 127, 8, 0, 1020, 265, 1, 0, 0, 0, 1021, 1022, 3, 50, 20, 0, 1022, 1023, 1, 0, 0, 0, 1023, 1024, 6, 128, 8, 0, 1024, 267, 1, 0, 0, 0, 1025, 1026, 3, 52, 21, 0, 1026, 1027, 1, 0, 0, 0, 1027, 1028, 6, 129, 8, 0, 1028, 269, 1, 0, 0, 0, 1029, 1030, 3, 64, 27, 0, 1030, 1031, 1, 0, 0, 0, 1031, 1032, 6, 130, 11, 0, 1032, 1033, 6, 130, 12, 0, 1033, 271, 1, 0, 0, 0, 1034, 1035, 3, 104, 47, 0, 1035, 1036, 1, 0, 0, 0, 1036, 1037, 6, 131, 17, 0, 1037, 273, 1, 0, 0, 0, 1038, 1039, 3, 164, 77, 0, 1039, 1040, 1, 0, 0, 0, 1040, 1041, 6, 132, 16, 0, 1041, 275, 1, 0, 0, 0, 1042, 1043, 3, 162, 76, 0, 1043, 1044, 1, 0, 0, 0, 1044, 1045, 6, 133, 22, 0, 1045, 277, 1, 0, 0, 0, 1046, 1047, 3, 48, 19, 0, 1047, 1048, 1, 0, 0, 0, 1048, 1049, 6, 134, 8, 0, 1049, 279, 1, 0, 0, 0, 1050, 1051, 3, 50, 20, 0, 1051, 1052, 1, 0, 0, 0, 1052, 1053, 6, 135, 8, 0, 1053, 281, 1, 0, 0, 0, 1054, 1055, 3, 52, 21, 0, 1055, 1056, 1, 0, 0, 0, 1056, 1057, 6, 136, 8, 0, 1057, 283, 1, 0, 0, 0, 1058, 1059, 3, 64, 27, 0, 1059, 1060, 1, 0, 0, 0, 1060, 1061, 6, 137, 11, 0, 1061, 1062, 6, 137, 12, 0, 1062, 285, 1, 0, 0, 0, 1063, 1064, 5, 105, 0, 0, 1064, 1065, 5, 110, 0, 0, 1065, 1066, 5, 102, 0, 0, 1066, 1067, 5, 111, 0, 0, 1067, 287, 1, 0, 0, 0, 1068, 1069, 5, 102, 0, 0, 1069, 1070, 5, 117, 0, 0, 1070, 1071, 5, 110, 0, 0, 1071, 1072, 5, 99, 0, 0, 1072, 1073, 5, 116, 0, 0, 1073, 1074, 5, 105, 0, 0, 1074, 1075, 5, 111, 0, 0, 1075, 1076, 5, 110, 0, 0, 1076, 1077, 5, 115, 0, 0, 1077, 289, 1, 0, 0, 0, 1078, 1079, 3, 48, 19, 0, 
1079, 1080, 1, 0, 0, 0, 1080, 1081, 6, 140, 8, 0, 1081, 291, 1, 0, 0, 0, 1082, 1083, 3, 50, 20, 0, 1083, 1084, 1, 0, 0, 0, 1084, 1085, 6, 141, 8, 0, 1085, 293, 1, 0, 0, 0, 1086, 1087, 3, 52, 21, 0, 1087, 1088, 1, 0, 0, 0, 1088, 1089, 6, 142, 8, 0, 1089, 295, 1, 0, 0, 0, 49, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 452, 462, 466, 469, 478, 480, 491, 532, 537, 546, 553, 558, 560, 571, 579, 582, 584, 589, 594, 600, 607, 612, 618, 621, 629, 633, 760, 765, 770, 772, 778, 831, 836, 871, 875, 880, 885, 890, 892, 23, 5, 2, 0, 5, 4, 0, 5, 6, 0, 5, 1, 0, 5, 3, 0, 5, 8, 0, 5, 5, 0, 5, 9, 0, 0, 1, 0, 7, 63, 0, 5, 0, 0, 7, 26, 0, 4, 0, 0, 7, 64, 0, 7, 34, 0, 7, 33, 0, 7, 66, 0, 7, 36, 0, 7, 75, 0, 5, 7, 0, 7, 71, 0, 7, 84, 0, 7, 65, 0] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java index 5a01cfa11b3fd..8946172327bcc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseLexer.java @@ -24,20 +24,28 @@ public class EsqlBaseLexer extends Lexer { PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, INFO=52, FUNCTIONS=53, EQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, AS=72, METADATA=73, - ON=74, WITH=75, SRC_UNQUOTED_IDENTIFIER=76, SRC_QUOTED_IDENTIFIER=77, - SRC_LINE_COMMENT=78, SRC_MULTILINE_COMMENT=79, SRC_WS=80, EXPLAIN_PIPE=81; + RLIKE=49, RP=50, TRUE=51, EQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, + CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, + EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, + FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, PROJECT_UNQUOTED_IDENTIFIER=75, + PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, + AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, + ON=83, WITH=84, ENRICH_LINE_COMMENT=85, ENRICH_MULTILINE_COMMENT=86, ENRICH_WS=87, + ENRICH_FIELD_LINE_COMMENT=88, ENRICH_FIELD_MULTILINE_COMMENT=89, ENRICH_FIELD_WS=90, + MVEXPAND_LINE_COMMENT=91, MVEXPAND_MULTILINE_COMMENT=92, MVEXPAND_WS=93, + INFO=94, FUNCTIONS=95, SHOW_LINE_COMMENT=96, SHOW_MULTILINE_COMMENT=97, + SHOW_WS=98; public static final int - EXPLAIN_MODE=1, EXPRESSION=2, SOURCE_IDENTIFIERS=3; + EXPLAIN_MODE=1, EXPRESSION_MODE=2, FROM_MODE=3, PROJECT_MODE=4, RENAME_MODE=5, + ENRICH_MODE=6, ENRICH_FIELD_MODE=7, MVEXPAND_MODE=8, SHOW_MODE=9; public static String[] channelNames = { "DEFAULT_TOKEN_CHANNEL", "HIDDEN" }; public static String[] modeNames = { - "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION", "SOURCE_IDENTIFIERS" + "DEFAULT_MODE", "EXPLAIN_MODE", "EXPRESSION_MODE", "FROM_MODE", "PROJECT_MODE", + "RENAME_MODE", "ENRICH_MODE", "ENRICH_FIELD_MODE", "MVEXPAND_MODE", "SHOW_MODE" }; private static String[] makeRuleNames() { @@ -47,16 +55,30 @@ private static String[] makeRuleNames() { "STATS", "WHERE", "UNKNOWN_CMD", 
"LINE_COMMENT", "MULTILINE_COMMENT", "WS", "EXPLAIN_OPENING_BRACKET", "EXPLAIN_PIPE", "EXPLAIN_WS", "EXPLAIN_LINE_COMMENT", "EXPLAIN_MULTILINE_COMMENT", "PIPE", "DIGIT", "LETTER", "ESCAPE_SEQUENCE", - "UNESCAPED_CHARS", "EXPONENT", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", + "UNESCAPED_CHARS", "EXPONENT", "ASPERAND", "BACKQUOTE", "BACKQUOTE_BLOCK", + "UNDERSCORE", "UNQUOTED_ID_BODY", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", - "RLIKE", "RP", "TRUE", "INFO", "FUNCTIONS", "EQ", "NEQ", "LT", "LTE", - "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", - "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", - "EXPR_MULTILINE_COMMENT", "EXPR_WS", "SRC_PIPE", "SRC_OPENING_BRACKET", - "SRC_CLOSING_BRACKET", "SRC_COMMA", "SRC_ASSIGN", "AS", "METADATA", "ON", - "WITH", "SRC_UNQUOTED_IDENTIFIER", "SRC_UNQUOTED_IDENTIFIER_PART", "SRC_QUOTED_IDENTIFIER", - "SRC_LINE_COMMENT", "SRC_MULTILINE_COMMENT", "SRC_WS" + "RLIKE", "RP", "TRUE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", + "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", + "EXPR_WS", "FROM_PIPE", "FROM_OPENING_BRACKET", "FROM_CLOSING_BRACKET", + "FROM_COMMA", "FROM_ASSIGN", "METADATA", "FROM_UNQUOTED_IDENTIFIER_PART", + "FROM_UNQUOTED_IDENTIFIER", "FROM_QUOTED_IDENTIFIER", "FROM_LINE_COMMENT", + "FROM_MULTILINE_COMMENT", "FROM_WS", "PROJECT_PIPE", "PROJECT_DOT", "PROJECT_COMMA", + "UNQUOTED_ID_BODY_WITH_PATTERN", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_QUOTED_IDENTIFIER", + "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", "PROJECT_WS", "RENAME_PIPE", + "RENAME_ASSIGN", "RENAME_COMMA", "RENAME_DOT", "AS", "RENAME_QUOTED_IDENTIFIER", + "RENAME_UNQUOTED_IDENTIFIER", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ENRICH_PIPE", "ON", "WITH", "ENRICH_POLICY_UNQUOTED_IDENTIFIER", + "ENRICH_QUOTED_IDENTIFIER", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_PIPE", "ENRICH_FIELD_ASSIGN", "ENRICH_FIELD_COMMA", + "ENRICH_FIELD_DOT", "ENRICH_FIELD_WITH", "ENRICH_FIELD_UNQUOTED_IDENTIFIER", + "ENRICH_FIELD_QUOTED_IDENTIFIER", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_PIPE", "MVEXPAND_DOT", "MVEXPAND_QUOTED_IDENTIFIER", + "MVEXPAND_UNQUOTED_IDENTIFIER", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "SHOW_PIPE", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", + "SHOW_MULTILINE_COMMENT", "SHOW_WS" }; } public static final String[] ruleNames = makeRuleNames(); @@ -66,12 +88,14 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'project'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, null, null, null, null, "'by'", "'and'", - "'asc'", null, null, "'desc'", "'.'", "'false'", "'first'", "'last'", - "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", - "'?'", "'rlike'", "')'", "'true'", "'info'", "'functions'", "'=='", "'!='", - "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, - "']'", null, null, null, null, null, "'as'", "'metadata'", "'on'", "'with'" + null, null, 
null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'!='", "'<'", "'<='", + "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, + null, null, null, null, "'metadata'", null, null, null, null, null, null, + null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, + null, null, null, null, null, null, "'info'", "'functions'" }; } private static final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -84,12 +108,17 @@ private static String[] makeSymbolicNames() { "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "INFO", "FUNCTIONS", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "AS", "METADATA", "ON", "WITH", "SRC_UNQUOTED_IDENTIFIER", - "SRC_QUOTED_IDENTIFIER", "SRC_LINE_COMMENT", "SRC_MULTILINE_COMMENT", - "SRC_WS", "EXPLAIN_PIPE" + "TRUE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -152,482 +181,683 @@ public EsqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\u0004\u0000Q\u02fc\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ - "\u0006\uffff\uffff\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ - "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ - "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ - "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ - "\f\u0007\f\u0002\r\u0007\r\u0002\u000e\u0007\u000e\u0002\u000f\u0007\u000f"+ - "\u0002\u0010\u0007\u0010\u0002\u0011\u0007\u0011\u0002\u0012\u0007\u0012"+ - "\u0002\u0013\u0007\u0013\u0002\u0014\u0007\u0014\u0002\u0015\u0007\u0015"+ - "\u0002\u0016\u0007\u0016\u0002\u0017\u0007\u0017\u0002\u0018\u0007\u0018"+ - "\u0002\u0019\u0007\u0019\u0002\u001a\u0007\u001a\u0002\u001b\u0007\u001b"+ - "\u0002\u001c\u0007\u001c\u0002\u001d\u0007\u001d\u0002\u001e\u0007\u001e"+ - "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ - "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ - "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - 
"-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u00021\u00071\u0002"+ - "2\u00072\u00023\u00073\u00024\u00074\u00025\u00075\u00026\u00076\u0002"+ - "7\u00077\u00028\u00078\u00029\u00079\u0002:\u0007:\u0002;\u0007;\u0002"+ - "<\u0007<\u0002=\u0007=\u0002>\u0007>\u0002?\u0007?\u0002@\u0007@\u0002"+ - "A\u0007A\u0002B\u0007B\u0002C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002"+ - "F\u0007F\u0002G\u0007G\u0002H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002"+ - "K\u0007K\u0002L\u0007L\u0002M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002"+ - "P\u0007P\u0002Q\u0007Q\u0002R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002"+ - "U\u0007U\u0002V\u0007V\u0002W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002"+ - "Z\u0007Z\u0002[\u0007[\u0002\\\u0007\\\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000"+ - "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006"+ - "\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007"+ - "\u0001\u0007\u0001\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001"+ - "\b\u0001\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001"+ - "\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ - "\n\u0001\n\u0001\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ - "\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001"+ - "\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f"+ + "\u0004\u0000b\u0442\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff"+ + "\u0006\uffff\uffff\u0006\uffff\uffff\u0006\uffff\uffff\u0002\u0000\u0007"+ + "\u0000\u0002\u0001\u0007\u0001\u0002\u0002\u0007\u0002\u0002\u0003\u0007"+ + "\u0003\u0002\u0004\u0007\u0004\u0002\u0005\u0007\u0005\u0002\u0006\u0007"+ + "\u0006\u0002\u0007\u0007\u0007\u0002\b\u0007\b\u0002\t\u0007\t\u0002\n"+ + "\u0007\n\u0002\u000b\u0007\u000b\u0002\f\u0007\f\u0002\r\u0007\r\u0002"+ + "\u000e\u0007\u000e\u0002\u000f\u0007\u000f\u0002\u0010\u0007\u0010\u0002"+ + "\u0011\u0007\u0011\u0002\u0012\u0007\u0012\u0002\u0013\u0007\u0013\u0002"+ + "\u0014\u0007\u0014\u0002\u0015\u0007\u0015\u0002\u0016\u0007\u0016\u0002"+ + "\u0017\u0007\u0017\u0002\u0018\u0007\u0018\u0002\u0019\u0007\u0019\u0002"+ + "\u001a\u0007\u001a\u0002\u001b\u0007\u001b\u0002\u001c\u0007\u001c\u0002"+ + "\u001d\u0007\u001d\u0002\u001e\u0007\u001e\u0002\u001f\u0007\u001f\u0002"+ + " \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002#\u0007#\u0002$\u0007$\u0002"+ + "%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002(\u0007(\u0002)\u0007)\u0002"+ + 
"*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002-\u0007-\u0002.\u0007.\u0002"+ + "/\u0007/\u00020\u00070\u00021\u00071\u00022\u00072\u00023\u00073\u0002"+ + "4\u00074\u00025\u00075\u00026\u00076\u00027\u00077\u00028\u00078\u0002"+ + "9\u00079\u0002:\u0007:\u0002;\u0007;\u0002<\u0007<\u0002=\u0007=\u0002"+ + ">\u0007>\u0002?\u0007?\u0002@\u0007@\u0002A\u0007A\u0002B\u0007B\u0002"+ + "C\u0007C\u0002D\u0007D\u0002E\u0007E\u0002F\u0007F\u0002G\u0007G\u0002"+ + "H\u0007H\u0002I\u0007I\u0002J\u0007J\u0002K\u0007K\u0002L\u0007L\u0002"+ + "M\u0007M\u0002N\u0007N\u0002O\u0007O\u0002P\u0007P\u0002Q\u0007Q\u0002"+ + "R\u0007R\u0002S\u0007S\u0002T\u0007T\u0002U\u0007U\u0002V\u0007V\u0002"+ + "W\u0007W\u0002X\u0007X\u0002Y\u0007Y\u0002Z\u0007Z\u0002[\u0007[\u0002"+ + "\\\u0007\\\u0002]\u0007]\u0002^\u0007^\u0002_\u0007_\u0002`\u0007`\u0002"+ + "a\u0007a\u0002b\u0007b\u0002c\u0007c\u0002d\u0007d\u0002e\u0007e\u0002"+ + "f\u0007f\u0002g\u0007g\u0002h\u0007h\u0002i\u0007i\u0002j\u0007j\u0002"+ + "k\u0007k\u0002l\u0007l\u0002m\u0007m\u0002n\u0007n\u0002o\u0007o\u0002"+ + "p\u0007p\u0002q\u0007q\u0002r\u0007r\u0002s\u0007s\u0002t\u0007t\u0002"+ + "u\u0007u\u0002v\u0007v\u0002w\u0007w\u0002x\u0007x\u0002y\u0007y\u0002"+ + "z\u0007z\u0002{\u0007{\u0002|\u0007|\u0002}\u0007}\u0002~\u0007~\u0002"+ + "\u007f\u0007\u007f\u0002\u0080\u0007\u0080\u0002\u0081\u0007\u0081\u0002"+ + "\u0082\u0007\u0082\u0002\u0083\u0007\u0083\u0002\u0084\u0007\u0084\u0002"+ + "\u0085\u0007\u0085\u0002\u0086\u0007\u0086\u0002\u0087\u0007\u0087\u0002"+ + "\u0088\u0007\u0088\u0002\u0089\u0007\u0089\u0002\u008a\u0007\u008a\u0002"+ + "\u008b\u0007\u008b\u0002\u008c\u0007\u008c\u0002\u008d\u0007\u008d\u0002"+ + "\u008e\u0007\u008e\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0000\u0001"+ + "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001"+ + "\u0002\u0001\u0002\u0001\u0002\u0001\u0002\u0001\u0003\u0001\u0003\u0001"+ + "\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0004\u0001"+ + "\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\t"+ + "\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\n\u0001"+ + "\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0001\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b"+ + "\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001"+ + "\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\f\u0001\r\u0001"+ + "\r\u0001\r\u0001\r\u0001\r\u0001\r\u0001\u000e\u0001\u000e\u0001\u000e"+ + "\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000f\u0001\u000f"+ + "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u0010"+ "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0010"+ - "\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ - "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012"+ - 
"\u0159\b\u0012\u000b\u0012\f\u0012\u015a\u0001\u0012\u0001\u0012\u0001"+ - "\u0013\u0001\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0163\b\u0013\n"+ - "\u0013\f\u0013\u0166\t\u0013\u0001\u0013\u0003\u0013\u0169\b\u0013\u0001"+ - "\u0013\u0003\u0013\u016c\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0005\u0014\u0175\b\u0014\n"+ - "\u0014\f\u0014\u0178\t\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ - "\u0014\u0001\u0014\u0001\u0015\u0004\u0015\u0180\b\u0015\u000b\u0015\f"+ - "\u0015\u0181\u0001\u0015\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016"+ - "\u0001\u0016\u0001\u0016\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019"+ - "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a"+ - "\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c"+ - "\u0001\u001c\u0001\u001d\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e"+ - "\u0001\u001f\u0001\u001f\u0001 \u0001 \u0003 \u01ab\b \u0001 \u0004 \u01ae"+ - "\b \u000b \f \u01af\u0001!\u0001!\u0001!\u0005!\u01b5\b!\n!\f!\u01b8\t"+ - "!\u0001!\u0001!\u0001!\u0001!\u0001!\u0001!\u0005!\u01c0\b!\n!\f!\u01c3"+ - "\t!\u0001!\u0001!\u0001!\u0001!\u0001!\u0003!\u01ca\b!\u0001!\u0003!\u01cd"+ - "\b!\u0003!\u01cf\b!\u0001\"\u0004\"\u01d2\b\"\u000b\"\f\"\u01d3\u0001"+ - "#\u0004#\u01d7\b#\u000b#\f#\u01d8\u0001#\u0001#\u0005#\u01dd\b#\n#\f#"+ - "\u01e0\t#\u0001#\u0001#\u0004#\u01e4\b#\u000b#\f#\u01e5\u0001#\u0004#"+ - "\u01e9\b#\u000b#\f#\u01ea\u0001#\u0001#\u0005#\u01ef\b#\n#\f#\u01f2\t"+ - "#\u0003#\u01f4\b#\u0001#\u0001#\u0001#\u0001#\u0004#\u01fa\b#\u000b#\f"+ - "#\u01fb\u0001#\u0001#\u0003#\u0200\b#\u0001$\u0001$\u0001$\u0001%\u0001"+ - "%\u0001%\u0001%\u0001&\u0001&\u0001&\u0001&\u0001\'\u0001\'\u0001(\u0001"+ - "(\u0001)\u0001)\u0001)\u0001)\u0001)\u0001*\u0001*\u0001+\u0001+\u0001"+ - "+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001,\u0001,\u0001,\u0001"+ - "-\u0001-\u0001-\u0001-\u0001-\u0001.\u0001.\u0001/\u0001/\u0001/\u0001"+ - "0\u00010\u00010\u00011\u00011\u00011\u00011\u00011\u00012\u00012\u0001"+ - "2\u00012\u00013\u00013\u00013\u00013\u00013\u00014\u00014\u00014\u0001"+ - "4\u00014\u00014\u00015\u00015\u00015\u00016\u00016\u00017\u00017\u0001"+ - "7\u00017\u00017\u00017\u00018\u00018\u00019\u00019\u00019\u00019\u0001"+ - "9\u0001:\u0001:\u0001:\u0001:\u0001:\u0001;\u0001;\u0001;\u0001;\u0001"+ - ";\u0001;\u0001;\u0001;\u0001;\u0001;\u0001<\u0001<\u0001<\u0001=\u0001"+ - "=\u0001=\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001A\u0001"+ - "A\u0001A\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001E\u0001E\u0001"+ - "F\u0001F\u0001G\u0001G\u0001G\u0001G\u0001G\u0001H\u0001H\u0001H\u0001"+ - "H\u0001H\u0001I\u0001I\u0001I\u0001I\u0005I\u028e\bI\nI\fI\u0291\tI\u0001"+ - "I\u0001I\u0001I\u0001I\u0004I\u0297\bI\u000bI\fI\u0298\u0003I\u029b\b"+ - "I\u0001J\u0001J\u0001J\u0001J\u0005J\u02a1\bJ\nJ\fJ\u02a4\tJ\u0001J\u0001"+ - "J\u0001K\u0001K\u0001K\u0001K\u0001L\u0001L\u0001L\u0001L\u0001M\u0001"+ - "M\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001N\u0001O\u0001O\u0001"+ - "O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001P\u0001P\u0001"+ - "Q\u0001Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001S\u0001S\u0001"+ - "S\u0001T\u0001T\u0001T\u0001T\u0001T\u0001T\u0001T\u0001T\u0001T\u0001"+ - "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001V\u0001W\u0004W\u02e2"+ - "\bW\u000bW\fW\u02e3\u0001X\u0004X\u02e7\bX\u000bX\fX\u02e8\u0001X\u0001"+ - 
"X\u0003X\u02ed\bX\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001[\u0001"+ - "[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0002\u0176\u01c1\u0000"+ - "]\u0004\u0001\u0006\u0002\b\u0003\n\u0004\f\u0005\u000e\u0006\u0010\u0007"+ - "\u0012\b\u0014\t\u0016\n\u0018\u000b\u001a\f\u001c\r\u001e\u000e \u000f"+ - "\"\u0010$\u0011&\u0012(\u0013*\u0014,\u0015.\u00160\u00002Q4\u00176\u0018"+ - "8\u0019:\u001a<\u0000>\u0000@\u0000B\u0000D\u0000F\u001bH\u001cJ\u001d"+ - "L\u001eN\u001fP R!T\"V#X$Z%\\&^\'`(b)d*f+h,j-l.n/p0r1t2v3x4z5|6~7\u0080"+ - "8\u00829\u0084:\u0086;\u0088<\u008a=\u008c>\u008e?\u0090@\u0092A\u0094"+ - "B\u0096C\u0098D\u009aE\u009cF\u009eG\u00a0\u0000\u00a2\u0000\u00a4\u0000"+ - "\u00a6\u0000\u00a8\u0000\u00aaH\u00acI\u00aeJ\u00b0K\u00b2L\u00b4\u0000"+ - "\u00b6M\u00b8N\u00baO\u00bcP\u0004\u0000\u0001\u0002\u0003\r\u0006\u0000"+ - "\t\n\r\r //[[]]\u0002\u0000\n\n\r\r\u0003\u0000\t\n\r\r \u0001\u0000"+ - "09\u0002\u0000AZaz\u0005\u0000\"\"\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\"+ - "\\\u0002\u0000EEee\u0002\u0000++--\u0002\u0000@@__\u0001\u0000``\n\u0000"+ - "\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0318\u0000\u0004\u0001\u0000"+ - "\u0000\u0000\u0000\u0006\u0001\u0000\u0000\u0000\u0000\b\u0001\u0000\u0000"+ - "\u0000\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000"+ - "\u0000\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000"+ - "\u0000\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000"+ - "\u0000\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000"+ - "\u0000\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000"+ - "\u0000\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000"+ - "\"\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001"+ - "\u0000\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000"+ - "\u0000\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u0001"+ - "0\u0001\u0000\u0000\u0000\u00012\u0001\u0000\u0000\u0000\u00014\u0001"+ - "\u0000\u0000\u0000\u00016\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000"+ - "\u0000\u0002:\u0001\u0000\u0000\u0000\u0002F\u0001\u0000\u0000\u0000\u0002"+ - "H\u0001\u0000\u0000\u0000\u0002J\u0001\u0000\u0000\u0000\u0002L\u0001"+ - "\u0000\u0000\u0000\u0002N\u0001\u0000\u0000\u0000\u0002P\u0001\u0000\u0000"+ - "\u0000\u0002R\u0001\u0000\u0000\u0000\u0002T\u0001\u0000\u0000\u0000\u0002"+ - "V\u0001\u0000\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001"+ - "\u0000\u0000\u0000\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000"+ - "\u0000\u0000\u0002`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000"+ - "\u0002d\u0001\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h"+ - "\u0001\u0000\u0000\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000"+ - "\u0000\u0000\u0002n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000"+ - "\u0002r\u0001\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v"+ - "\u0001\u0000\u0000\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000"+ - "\u0000\u0000\u0002|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000"+ - "\u0002\u0080\u0001\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000"+ - "\u0002\u0084\u0001\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000"+ - "\u0002\u0088\u0001\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000"+ - "\u0002\u008c\u0001\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000"+ - "\u0002\u0090\u0001\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000"+ - 
"\u0002\u0094\u0001\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000"+ - "\u0002\u0098\u0001\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000"+ - "\u0002\u009c\u0001\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000"+ - "\u0003\u00a0\u0001\u0000\u0000\u0000\u0003\u00a2\u0001\u0000\u0000\u0000"+ - "\u0003\u00a4\u0001\u0000\u0000\u0000\u0003\u00a6\u0001\u0000\u0000\u0000"+ - "\u0003\u00a8\u0001\u0000\u0000\u0000\u0003\u00aa\u0001\u0000\u0000\u0000"+ - "\u0003\u00ac\u0001\u0000\u0000\u0000\u0003\u00ae\u0001\u0000\u0000\u0000"+ - "\u0003\u00b0\u0001\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000"+ - "\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00b8\u0001\u0000\u0000\u0000"+ - "\u0003\u00ba\u0001\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000"+ - "\u0004\u00be\u0001\u0000\u0000\u0000\u0006\u00c8\u0001\u0000\u0000\u0000"+ - "\b\u00cf\u0001\u0000\u0000\u0000\n\u00d8\u0001\u0000\u0000\u0000\f\u00df"+ - "\u0001\u0000\u0000\u0000\u000e\u00e9\u0001\u0000\u0000\u0000\u0010\u00f0"+ - "\u0001\u0000\u0000\u0000\u0012\u00f7\u0001\u0000\u0000\u0000\u0014\u0105"+ - "\u0001\u0000\u0000\u0000\u0016\u010c\u0001\u0000\u0000\u0000\u0018\u0114"+ - "\u0001\u0000\u0000\u0000\u001a\u0120\u0001\u0000\u0000\u0000\u001c\u012a"+ - "\u0001\u0000\u0000\u0000\u001e\u0133\u0001\u0000\u0000\u0000 \u0139\u0001"+ - "\u0000\u0000\u0000\"\u0140\u0001\u0000\u0000\u0000$\u0147\u0001\u0000"+ - "\u0000\u0000&\u014f\u0001\u0000\u0000\u0000(\u0158\u0001\u0000\u0000\u0000"+ - "*\u015e\u0001\u0000\u0000\u0000,\u016f\u0001\u0000\u0000\u0000.\u017f"+ - "\u0001\u0000\u0000\u00000\u0185\u0001\u0000\u0000\u00002\u018a\u0001\u0000"+ - "\u0000\u00004\u018f\u0001\u0000\u0000\u00006\u0193\u0001\u0000\u0000\u0000"+ - "8\u0197\u0001\u0000\u0000\u0000:\u019b\u0001\u0000\u0000\u0000<\u019f"+ - "\u0001\u0000\u0000\u0000>\u01a1\u0001\u0000\u0000\u0000@\u01a3\u0001\u0000"+ - "\u0000\u0000B\u01a6\u0001\u0000\u0000\u0000D\u01a8\u0001\u0000\u0000\u0000"+ - "F\u01ce\u0001\u0000\u0000\u0000H\u01d1\u0001\u0000\u0000\u0000J\u01ff"+ - "\u0001\u0000\u0000\u0000L\u0201\u0001\u0000\u0000\u0000N\u0204\u0001\u0000"+ - "\u0000\u0000P\u0208\u0001\u0000\u0000\u0000R\u020c\u0001\u0000\u0000\u0000"+ - "T\u020e\u0001\u0000\u0000\u0000V\u0210\u0001\u0000\u0000\u0000X\u0215"+ - "\u0001\u0000\u0000\u0000Z\u0217\u0001\u0000\u0000\u0000\\\u021d\u0001"+ - "\u0000\u0000\u0000^\u0223\u0001\u0000\u0000\u0000`\u0228\u0001\u0000\u0000"+ - "\u0000b\u022a\u0001\u0000\u0000\u0000d\u022d\u0001\u0000\u0000\u0000f"+ - "\u0230\u0001\u0000\u0000\u0000h\u0235\u0001\u0000\u0000\u0000j\u0239\u0001"+ - "\u0000\u0000\u0000l\u023e\u0001\u0000\u0000\u0000n\u0244\u0001\u0000\u0000"+ - "\u0000p\u0247\u0001\u0000\u0000\u0000r\u0249\u0001\u0000\u0000\u0000t"+ - "\u024f\u0001\u0000\u0000\u0000v\u0251\u0001\u0000\u0000\u0000x\u0256\u0001"+ - "\u0000\u0000\u0000z\u025b\u0001\u0000\u0000\u0000|\u0265\u0001\u0000\u0000"+ - "\u0000~\u0268\u0001\u0000\u0000\u0000\u0080\u026b\u0001\u0000\u0000\u0000"+ - "\u0082\u026d\u0001\u0000\u0000\u0000\u0084\u0270\u0001\u0000\u0000\u0000"+ - "\u0086\u0272\u0001\u0000\u0000\u0000\u0088\u0275\u0001\u0000\u0000\u0000"+ - "\u008a\u0277\u0001\u0000\u0000\u0000\u008c\u0279\u0001\u0000\u0000\u0000"+ - "\u008e\u027b\u0001\u0000\u0000\u0000\u0090\u027d\u0001\u0000\u0000\u0000"+ - "\u0092\u027f\u0001\u0000\u0000\u0000\u0094\u0284\u0001\u0000\u0000\u0000"+ - "\u0096\u029a\u0001\u0000\u0000\u0000\u0098\u029c\u0001\u0000\u0000\u0000"+ - "\u009a\u02a7\u0001\u0000\u0000\u0000\u009c\u02ab\u0001\u0000\u0000\u0000"+ - 
"\u009e\u02af\u0001\u0000\u0000\u0000\u00a0\u02b3\u0001\u0000\u0000\u0000"+ - "\u00a2\u02b8\u0001\u0000\u0000\u0000\u00a4\u02be\u0001\u0000\u0000\u0000"+ - "\u00a6\u02c4\u0001\u0000\u0000\u0000\u00a8\u02c8\u0001\u0000\u0000\u0000"+ - "\u00aa\u02cc\u0001\u0000\u0000\u0000\u00ac\u02cf\u0001\u0000\u0000\u0000"+ - "\u00ae\u02d8\u0001\u0000\u0000\u0000\u00b0\u02db\u0001\u0000\u0000\u0000"+ - "\u00b2\u02e1\u0001\u0000\u0000\u0000\u00b4\u02ec\u0001\u0000\u0000\u0000"+ - "\u00b6\u02ee\u0001\u0000\u0000\u0000\u00b8\u02f0\u0001\u0000\u0000\u0000"+ - "\u00ba\u02f4\u0001\u0000\u0000\u0000\u00bc\u02f8\u0001\u0000\u0000\u0000"+ - "\u00be\u00bf\u0005d\u0000\u0000\u00bf\u00c0\u0005i\u0000\u0000\u00c0\u00c1"+ - "\u0005s\u0000\u0000\u00c1\u00c2\u0005s\u0000\u0000\u00c2\u00c3\u0005e"+ - "\u0000\u0000\u00c3\u00c4\u0005c\u0000\u0000\u00c4\u00c5\u0005t\u0000\u0000"+ - "\u00c5\u00c6\u0001\u0000\u0000\u0000\u00c6\u00c7\u0006\u0000\u0000\u0000"+ - "\u00c7\u0005\u0001\u0000\u0000\u0000\u00c8\u00c9\u0005d\u0000\u0000\u00c9"+ - "\u00ca\u0005r\u0000\u0000\u00ca\u00cb\u0005o\u0000\u0000\u00cb\u00cc\u0005"+ - "p\u0000\u0000\u00cc\u00cd\u0001\u0000\u0000\u0000\u00cd\u00ce\u0006\u0001"+ - "\u0001\u0000\u00ce\u0007\u0001\u0000\u0000\u0000\u00cf\u00d0\u0005e\u0000"+ - "\u0000\u00d0\u00d1\u0005n\u0000\u0000\u00d1\u00d2\u0005r\u0000\u0000\u00d2"+ - "\u00d3\u0005i\u0000\u0000\u00d3\u00d4\u0005c\u0000\u0000\u00d4\u00d5\u0005"+ - "h\u0000\u0000\u00d5\u00d6\u0001\u0000\u0000\u0000\u00d6\u00d7\u0006\u0002"+ - "\u0001\u0000\u00d7\t\u0001\u0000\u0000\u0000\u00d8\u00d9\u0005e\u0000"+ - "\u0000\u00d9\u00da\u0005v\u0000\u0000\u00da\u00db\u0005a\u0000\u0000\u00db"+ - "\u00dc\u0005l\u0000\u0000\u00dc\u00dd\u0001\u0000\u0000\u0000\u00dd\u00de"+ - "\u0006\u0003\u0000\u0000\u00de\u000b\u0001\u0000\u0000\u0000\u00df\u00e0"+ - "\u0005e\u0000\u0000\u00e0\u00e1\u0005x\u0000\u0000\u00e1\u00e2\u0005p"+ - "\u0000\u0000\u00e2\u00e3\u0005l\u0000\u0000\u00e3\u00e4\u0005a\u0000\u0000"+ - "\u00e4\u00e5\u0005i\u0000\u0000\u00e5\u00e6\u0005n\u0000\u0000\u00e6\u00e7"+ - "\u0001\u0000\u0000\u0000\u00e7\u00e8\u0006\u0004\u0002\u0000\u00e8\r\u0001"+ - "\u0000\u0000\u0000\u00e9\u00ea\u0005f\u0000\u0000\u00ea\u00eb\u0005r\u0000"+ - "\u0000\u00eb\u00ec\u0005o\u0000\u0000\u00ec\u00ed\u0005m\u0000\u0000\u00ed"+ - "\u00ee\u0001\u0000\u0000\u0000\u00ee\u00ef\u0006\u0005\u0001\u0000\u00ef"+ - "\u000f\u0001\u0000\u0000\u0000\u00f0\u00f1\u0005g\u0000\u0000\u00f1\u00f2"+ - "\u0005r\u0000\u0000\u00f2\u00f3\u0005o\u0000\u0000\u00f3\u00f4\u0005k"+ - "\u0000\u0000\u00f4\u00f5\u0001\u0000\u0000\u0000\u00f5\u00f6\u0006\u0006"+ - "\u0000\u0000\u00f6\u0011\u0001\u0000\u0000\u0000\u00f7\u00f8\u0005i\u0000"+ - "\u0000\u00f8\u00f9\u0005n\u0000\u0000\u00f9\u00fa\u0005l\u0000\u0000\u00fa"+ - "\u00fb\u0005i\u0000\u0000\u00fb\u00fc\u0005n\u0000\u0000\u00fc\u00fd\u0005"+ - "e\u0000\u0000\u00fd\u00fe\u0005s\u0000\u0000\u00fe\u00ff\u0005t\u0000"+ - "\u0000\u00ff\u0100\u0005a\u0000\u0000\u0100\u0101\u0005t\u0000\u0000\u0101"+ - "\u0102\u0005s\u0000\u0000\u0102\u0103\u0001\u0000\u0000\u0000\u0103\u0104"+ - "\u0006\u0007\u0000\u0000\u0104\u0013\u0001\u0000\u0000\u0000\u0105\u0106"+ - "\u0005k\u0000\u0000\u0106\u0107\u0005e\u0000\u0000\u0107\u0108\u0005e"+ - "\u0000\u0000\u0108\u0109\u0005p\u0000\u0000\u0109\u010a\u0001\u0000\u0000"+ - "\u0000\u010a\u010b\u0006\b\u0001\u0000\u010b\u0015\u0001\u0000\u0000\u0000"+ - "\u010c\u010d\u0005l\u0000\u0000\u010d\u010e\u0005i\u0000\u0000\u010e\u010f"+ - "\u0005m\u0000\u0000\u010f\u0110\u0005i\u0000\u0000\u0110\u0111\u0005t"+ - 
"\u0000\u0000\u0111\u0112\u0001\u0000\u0000\u0000\u0112\u0113\u0006\t\u0000"+ - "\u0000\u0113\u0017\u0001\u0000\u0000\u0000\u0114\u0115\u0005m\u0000\u0000"+ - "\u0115\u0116\u0005v\u0000\u0000\u0116\u0117\u0005_\u0000\u0000\u0117\u0118"+ - "\u0005e\u0000\u0000\u0118\u0119\u0005x\u0000\u0000\u0119\u011a\u0005p"+ - "\u0000\u0000\u011a\u011b\u0005a\u0000\u0000\u011b\u011c\u0005n\u0000\u0000"+ - "\u011c\u011d\u0005d\u0000\u0000\u011d\u011e\u0001\u0000\u0000\u0000\u011e"+ - "\u011f\u0006\n\u0001\u0000\u011f\u0019\u0001\u0000\u0000\u0000\u0120\u0121"+ - "\u0005p\u0000\u0000\u0121\u0122\u0005r\u0000\u0000\u0122\u0123\u0005o"+ - "\u0000\u0000\u0123\u0124\u0005j\u0000\u0000\u0124\u0125\u0005e\u0000\u0000"+ - "\u0125\u0126\u0005c\u0000\u0000\u0126\u0127\u0005t\u0000\u0000\u0127\u0128"+ - "\u0001\u0000\u0000\u0000\u0128\u0129\u0006\u000b\u0001\u0000\u0129\u001b"+ - "\u0001\u0000\u0000\u0000\u012a\u012b\u0005r\u0000\u0000\u012b\u012c\u0005"+ - "e\u0000\u0000\u012c\u012d\u0005n\u0000\u0000\u012d\u012e\u0005a\u0000"+ - "\u0000\u012e\u012f\u0005m\u0000\u0000\u012f\u0130\u0005e\u0000\u0000\u0130"+ - "\u0131\u0001\u0000\u0000\u0000\u0131\u0132\u0006\f\u0001\u0000\u0132\u001d"+ - "\u0001\u0000\u0000\u0000\u0133\u0134\u0005r\u0000\u0000\u0134\u0135\u0005"+ - "o\u0000\u0000\u0135\u0136\u0005w\u0000\u0000\u0136\u0137\u0001\u0000\u0000"+ - "\u0000\u0137\u0138\u0006\r\u0000\u0000\u0138\u001f\u0001\u0000\u0000\u0000"+ - "\u0139\u013a\u0005s\u0000\u0000\u013a\u013b\u0005h\u0000\u0000\u013b\u013c"+ - "\u0005o\u0000\u0000\u013c\u013d\u0005w\u0000\u0000\u013d\u013e\u0001\u0000"+ - "\u0000\u0000\u013e\u013f\u0006\u000e\u0000\u0000\u013f!\u0001\u0000\u0000"+ - "\u0000\u0140\u0141\u0005s\u0000\u0000\u0141\u0142\u0005o\u0000\u0000\u0142"+ - "\u0143\u0005r\u0000\u0000\u0143\u0144\u0005t\u0000\u0000\u0144\u0145\u0001"+ - "\u0000\u0000\u0000\u0145\u0146\u0006\u000f\u0000\u0000\u0146#\u0001\u0000"+ - "\u0000\u0000\u0147\u0148\u0005s\u0000\u0000\u0148\u0149\u0005t\u0000\u0000"+ - "\u0149\u014a\u0005a\u0000\u0000\u014a\u014b\u0005t\u0000\u0000\u014b\u014c"+ - "\u0005s\u0000\u0000\u014c\u014d\u0001\u0000\u0000\u0000\u014d\u014e\u0006"+ - "\u0010\u0000\u0000\u014e%\u0001\u0000\u0000\u0000\u014f\u0150\u0005w\u0000"+ - "\u0000\u0150\u0151\u0005h\u0000\u0000\u0151\u0152\u0005e\u0000\u0000\u0152"+ - "\u0153\u0005r\u0000\u0000\u0153\u0154\u0005e\u0000\u0000\u0154\u0155\u0001"+ - "\u0000\u0000\u0000\u0155\u0156\u0006\u0011\u0000\u0000\u0156\'\u0001\u0000"+ - "\u0000\u0000\u0157\u0159\b\u0000\u0000\u0000\u0158\u0157\u0001\u0000\u0000"+ - "\u0000\u0159\u015a\u0001\u0000\u0000\u0000\u015a\u0158\u0001\u0000\u0000"+ - "\u0000\u015a\u015b\u0001\u0000\u0000\u0000\u015b\u015c\u0001\u0000\u0000"+ - "\u0000\u015c\u015d\u0006\u0012\u0000\u0000\u015d)\u0001\u0000\u0000\u0000"+ - "\u015e\u015f\u0005/\u0000\u0000\u015f\u0160\u0005/\u0000\u0000\u0160\u0164"+ - "\u0001\u0000\u0000\u0000\u0161\u0163\b\u0001\u0000\u0000\u0162\u0161\u0001"+ - "\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164\u0162\u0001"+ - "\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165\u0168\u0001"+ - "\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167\u0169\u0005"+ - "\r\u0000\u0000\u0168\u0167\u0001\u0000\u0000\u0000\u0168\u0169\u0001\u0000"+ - "\u0000\u0000\u0169\u016b\u0001\u0000\u0000\u0000\u016a\u016c\u0005\n\u0000"+ - "\u0000\u016b\u016a\u0001\u0000\u0000\u0000\u016b\u016c\u0001\u0000\u0000"+ - "\u0000\u016c\u016d\u0001\u0000\u0000\u0000\u016d\u016e\u0006\u0013\u0003"+ - 
"\u0000\u016e+\u0001\u0000\u0000\u0000\u016f\u0170\u0005/\u0000\u0000\u0170"+ - "\u0171\u0005*\u0000\u0000\u0171\u0176\u0001\u0000\u0000\u0000\u0172\u0175"+ - "\u0003,\u0014\u0000\u0173\u0175\t\u0000\u0000\u0000\u0174\u0172\u0001"+ - "\u0000\u0000\u0000\u0174\u0173\u0001\u0000\u0000\u0000\u0175\u0178\u0001"+ - "\u0000\u0000\u0000\u0176\u0177\u0001\u0000\u0000\u0000\u0176\u0174\u0001"+ - "\u0000\u0000\u0000\u0177\u0179\u0001\u0000\u0000\u0000\u0178\u0176\u0001"+ - "\u0000\u0000\u0000\u0179\u017a\u0005*\u0000\u0000\u017a\u017b\u0005/\u0000"+ - "\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c\u017d\u0006\u0014\u0003"+ - "\u0000\u017d-\u0001\u0000\u0000\u0000\u017e\u0180\u0007\u0002\u0000\u0000"+ - "\u017f\u017e\u0001\u0000\u0000\u0000\u0180\u0181\u0001\u0000\u0000\u0000"+ - "\u0181\u017f\u0001\u0000\u0000\u0000\u0181\u0182\u0001\u0000\u0000\u0000"+ - "\u0182\u0183\u0001\u0000\u0000\u0000\u0183\u0184\u0006\u0015\u0003\u0000"+ - "\u0184/\u0001\u0000\u0000\u0000\u0185\u0186\u0005[\u0000\u0000\u0186\u0187"+ - "\u0001\u0000\u0000\u0000\u0187\u0188\u0006\u0016\u0004\u0000\u0188\u0189"+ - "\u0006\u0016\u0005\u0000\u01891\u0001\u0000\u0000\u0000\u018a\u018b\u0005"+ - "|\u0000\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c\u018d\u0006\u0017"+ - "\u0006\u0000\u018d\u018e\u0006\u0017\u0007\u0000\u018e3\u0001\u0000\u0000"+ - "\u0000\u018f\u0190\u0003.\u0015\u0000\u0190\u0191\u0001\u0000\u0000\u0000"+ - "\u0191\u0192\u0006\u0018\u0003\u0000\u01925\u0001\u0000\u0000\u0000\u0193"+ - "\u0194\u0003*\u0013\u0000\u0194\u0195\u0001\u0000\u0000\u0000\u0195\u0196"+ - "\u0006\u0019\u0003\u0000\u01967\u0001\u0000\u0000\u0000\u0197\u0198\u0003"+ - ",\u0014\u0000\u0198\u0199\u0001\u0000\u0000\u0000\u0199\u019a\u0006\u001a"+ - "\u0003\u0000\u019a9\u0001\u0000\u0000\u0000\u019b\u019c\u0005|\u0000\u0000"+ - "\u019c\u019d\u0001\u0000\u0000\u0000\u019d\u019e\u0006\u001b\u0007\u0000"+ - "\u019e;\u0001\u0000\u0000\u0000\u019f\u01a0\u0007\u0003\u0000\u0000\u01a0"+ - "=\u0001\u0000\u0000\u0000\u01a1\u01a2\u0007\u0004\u0000\u0000\u01a2?\u0001"+ - "\u0000\u0000\u0000\u01a3\u01a4\u0005\\\u0000\u0000\u01a4\u01a5\u0007\u0005"+ - "\u0000\u0000\u01a5A\u0001\u0000\u0000\u0000\u01a6\u01a7\b\u0006\u0000"+ - "\u0000\u01a7C\u0001\u0000\u0000\u0000\u01a8\u01aa\u0007\u0007\u0000\u0000"+ - "\u01a9\u01ab\u0007\b\u0000\u0000\u01aa\u01a9\u0001\u0000\u0000\u0000\u01aa"+ - "\u01ab\u0001\u0000\u0000\u0000\u01ab\u01ad\u0001\u0000\u0000\u0000\u01ac"+ - "\u01ae\u0003<\u001c\u0000\u01ad\u01ac\u0001\u0000\u0000\u0000\u01ae\u01af"+ - "\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01af\u01b0"+ - "\u0001\u0000\u0000\u0000\u01b0E\u0001\u0000\u0000\u0000\u01b1\u01b6\u0005"+ - "\"\u0000\u0000\u01b2\u01b5\u0003@\u001e\u0000\u01b3\u01b5\u0003B\u001f"+ - "\u0000\u01b4\u01b2\u0001\u0000\u0000\u0000\u01b4\u01b3\u0001\u0000\u0000"+ - "\u0000\u01b5\u01b8\u0001\u0000\u0000\u0000\u01b6\u01b4\u0001\u0000\u0000"+ - "\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7\u01b9\u0001\u0000\u0000"+ - "\u0000\u01b8\u01b6\u0001\u0000\u0000\u0000\u01b9\u01cf\u0005\"\u0000\u0000"+ - "\u01ba\u01bb\u0005\"\u0000\u0000\u01bb\u01bc\u0005\"\u0000\u0000\u01bc"+ - "\u01bd\u0005\"\u0000\u0000\u01bd\u01c1\u0001\u0000\u0000\u0000\u01be\u01c0"+ - "\b\u0001\u0000\u0000\u01bf\u01be\u0001\u0000\u0000\u0000\u01c0\u01c3\u0001"+ - "\u0000\u0000\u0000\u01c1\u01c2\u0001\u0000\u0000\u0000\u01c1\u01bf\u0001"+ - "\u0000\u0000\u0000\u01c2\u01c4\u0001\u0000\u0000\u0000\u01c3\u01c1\u0001"+ - "\u0000\u0000\u0000\u01c4\u01c5\u0005\"\u0000\u0000\u01c5\u01c6\u0005\""+ - 
"\u0000\u0000\u01c6\u01c7\u0005\"\u0000\u0000\u01c7\u01c9\u0001\u0000\u0000"+ - "\u0000\u01c8\u01ca\u0005\"\u0000\u0000\u01c9\u01c8\u0001\u0000\u0000\u0000"+ - "\u01c9\u01ca\u0001\u0000\u0000\u0000\u01ca\u01cc\u0001\u0000\u0000\u0000"+ - "\u01cb\u01cd\u0005\"\u0000\u0000\u01cc\u01cb\u0001\u0000\u0000\u0000\u01cc"+ - "\u01cd\u0001\u0000\u0000\u0000\u01cd\u01cf\u0001\u0000\u0000\u0000\u01ce"+ - "\u01b1\u0001\u0000\u0000\u0000\u01ce\u01ba\u0001\u0000\u0000\u0000\u01cf"+ - "G\u0001\u0000\u0000\u0000\u01d0\u01d2\u0003<\u001c\u0000\u01d1\u01d0\u0001"+ - "\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000\u0000\u0000\u01d3\u01d1\u0001"+ - "\u0000\u0000\u0000\u01d3\u01d4\u0001\u0000\u0000\u0000\u01d4I\u0001\u0000"+ - "\u0000\u0000\u01d5\u01d7\u0003<\u001c\u0000\u01d6\u01d5\u0001\u0000\u0000"+ - "\u0000\u01d7\u01d8\u0001\u0000\u0000\u0000\u01d8\u01d6\u0001\u0000\u0000"+ - "\u0000\u01d8\u01d9\u0001\u0000\u0000\u0000\u01d9\u01da\u0001\u0000\u0000"+ - "\u0000\u01da\u01de\u0003X*\u0000\u01db\u01dd\u0003<\u001c\u0000\u01dc"+ - "\u01db\u0001\u0000\u0000\u0000\u01dd\u01e0\u0001\u0000\u0000\u0000\u01de"+ - "\u01dc\u0001\u0000\u0000\u0000\u01de\u01df\u0001\u0000\u0000\u0000\u01df"+ - "\u0200\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000\u0000\u0000\u01e1"+ - "\u01e3\u0003X*\u0000\u01e2\u01e4\u0003<\u001c\u0000\u01e3\u01e2\u0001"+ - "\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000\u0000\u0000\u01e5\u01e3\u0001"+ - "\u0000\u0000\u0000\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u0200\u0001"+ - "\u0000\u0000\u0000\u01e7\u01e9\u0003<\u001c\u0000\u01e8\u01e7\u0001\u0000"+ - "\u0000\u0000\u01e9\u01ea\u0001\u0000\u0000\u0000\u01ea\u01e8\u0001\u0000"+ - "\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01f3\u0001\u0000"+ - "\u0000\u0000\u01ec\u01f0\u0003X*\u0000\u01ed\u01ef\u0003<\u001c\u0000"+ - "\u01ee\u01ed\u0001\u0000\u0000\u0000\u01ef\u01f2\u0001\u0000\u0000\u0000"+ - "\u01f0\u01ee\u0001\u0000\u0000\u0000\u01f0\u01f1\u0001\u0000\u0000\u0000"+ - "\u01f1\u01f4\u0001\u0000\u0000\u0000\u01f2\u01f0\u0001\u0000\u0000\u0000"+ - "\u01f3\u01ec\u0001\u0000\u0000\u0000\u01f3\u01f4\u0001\u0000\u0000\u0000"+ - "\u01f4\u01f5\u0001\u0000\u0000\u0000\u01f5\u01f6\u0003D \u0000\u01f6\u0200"+ - "\u0001\u0000\u0000\u0000\u01f7\u01f9\u0003X*\u0000\u01f8\u01fa\u0003<"+ - "\u001c\u0000\u01f9\u01f8\u0001\u0000\u0000\u0000\u01fa\u01fb\u0001\u0000"+ - "\u0000\u0000\u01fb\u01f9\u0001\u0000\u0000\u0000\u01fb\u01fc\u0001\u0000"+ - "\u0000\u0000\u01fc\u01fd\u0001\u0000\u0000\u0000\u01fd\u01fe\u0003D \u0000"+ - "\u01fe\u0200\u0001\u0000\u0000\u0000\u01ff\u01d6\u0001\u0000\u0000\u0000"+ - "\u01ff\u01e1\u0001\u0000\u0000\u0000\u01ff\u01e8\u0001\u0000\u0000\u0000"+ - "\u01ff\u01f7\u0001\u0000\u0000\u0000\u0200K\u0001\u0000\u0000\u0000\u0201"+ - "\u0202\u0005b\u0000\u0000\u0202\u0203\u0005y\u0000\u0000\u0203M\u0001"+ - "\u0000\u0000\u0000\u0204\u0205\u0005a\u0000\u0000\u0205\u0206\u0005n\u0000"+ - "\u0000\u0206\u0207\u0005d\u0000\u0000\u0207O\u0001\u0000\u0000\u0000\u0208"+ - "\u0209\u0005a\u0000\u0000\u0209\u020a\u0005s\u0000\u0000\u020a\u020b\u0005"+ - "c\u0000\u0000\u020bQ\u0001\u0000\u0000\u0000\u020c\u020d\u0005=\u0000"+ - "\u0000\u020dS\u0001\u0000\u0000\u0000\u020e\u020f\u0005,\u0000\u0000\u020f"+ - "U\u0001\u0000\u0000\u0000\u0210\u0211\u0005d\u0000\u0000\u0211\u0212\u0005"+ - "e\u0000\u0000\u0212\u0213\u0005s\u0000\u0000\u0213\u0214\u0005c\u0000"+ - "\u0000\u0214W\u0001\u0000\u0000\u0000\u0215\u0216\u0005.\u0000\u0000\u0216"+ - "Y\u0001\u0000\u0000\u0000\u0217\u0218\u0005f\u0000\u0000\u0218\u0219\u0005"+ - 
"a\u0000\u0000\u0219\u021a\u0005l\u0000\u0000\u021a\u021b\u0005s\u0000"+ - "\u0000\u021b\u021c\u0005e\u0000\u0000\u021c[\u0001\u0000\u0000\u0000\u021d"+ - "\u021e\u0005f\u0000\u0000\u021e\u021f\u0005i\u0000\u0000\u021f\u0220\u0005"+ - "r\u0000\u0000\u0220\u0221\u0005s\u0000\u0000\u0221\u0222\u0005t\u0000"+ - "\u0000\u0222]\u0001\u0000\u0000\u0000\u0223\u0224\u0005l\u0000\u0000\u0224"+ - "\u0225\u0005a\u0000\u0000\u0225\u0226\u0005s\u0000\u0000\u0226\u0227\u0005"+ - "t\u0000\u0000\u0227_\u0001\u0000\u0000\u0000\u0228\u0229\u0005(\u0000"+ - "\u0000\u0229a\u0001\u0000\u0000\u0000\u022a\u022b\u0005i\u0000\u0000\u022b"+ - "\u022c\u0005n\u0000\u0000\u022cc\u0001\u0000\u0000\u0000\u022d\u022e\u0005"+ - "i\u0000\u0000\u022e\u022f\u0005s\u0000\u0000\u022fe\u0001\u0000\u0000"+ - "\u0000\u0230\u0231\u0005l\u0000\u0000\u0231\u0232\u0005i\u0000\u0000\u0232"+ - "\u0233\u0005k\u0000\u0000\u0233\u0234\u0005e\u0000\u0000\u0234g\u0001"+ - "\u0000\u0000\u0000\u0235\u0236\u0005n\u0000\u0000\u0236\u0237\u0005o\u0000"+ - "\u0000\u0237\u0238\u0005t\u0000\u0000\u0238i\u0001\u0000\u0000\u0000\u0239"+ - "\u023a\u0005n\u0000\u0000\u023a\u023b\u0005u\u0000\u0000\u023b\u023c\u0005"+ - "l\u0000\u0000\u023c\u023d\u0005l\u0000\u0000\u023dk\u0001\u0000\u0000"+ - "\u0000\u023e\u023f\u0005n\u0000\u0000\u023f\u0240\u0005u\u0000\u0000\u0240"+ - "\u0241\u0005l\u0000\u0000\u0241\u0242\u0005l\u0000\u0000\u0242\u0243\u0005"+ - "s\u0000\u0000\u0243m\u0001\u0000\u0000\u0000\u0244\u0245\u0005o\u0000"+ - "\u0000\u0245\u0246\u0005r\u0000\u0000\u0246o\u0001\u0000\u0000\u0000\u0247"+ - "\u0248\u0005?\u0000\u0000\u0248q\u0001\u0000\u0000\u0000\u0249\u024a\u0005"+ - "r\u0000\u0000\u024a\u024b\u0005l\u0000\u0000\u024b\u024c\u0005i\u0000"+ - "\u0000\u024c\u024d\u0005k\u0000\u0000\u024d\u024e\u0005e\u0000\u0000\u024e"+ - "s\u0001\u0000\u0000\u0000\u024f\u0250\u0005)\u0000\u0000\u0250u\u0001"+ - "\u0000\u0000\u0000\u0251\u0252\u0005t\u0000\u0000\u0252\u0253\u0005r\u0000"+ - "\u0000\u0253\u0254\u0005u\u0000\u0000\u0254\u0255\u0005e\u0000\u0000\u0255"+ - "w\u0001\u0000\u0000\u0000\u0256\u0257\u0005i\u0000\u0000\u0257\u0258\u0005"+ - "n\u0000\u0000\u0258\u0259\u0005f\u0000\u0000\u0259\u025a\u0005o\u0000"+ - "\u0000\u025ay\u0001\u0000\u0000\u0000\u025b\u025c\u0005f\u0000\u0000\u025c"+ - "\u025d\u0005u\u0000\u0000\u025d\u025e\u0005n\u0000\u0000\u025e\u025f\u0005"+ - "c\u0000\u0000\u025f\u0260\u0005t\u0000\u0000\u0260\u0261\u0005i\u0000"+ - "\u0000\u0261\u0262\u0005o\u0000\u0000\u0262\u0263\u0005n\u0000\u0000\u0263"+ - "\u0264\u0005s\u0000\u0000\u0264{\u0001\u0000\u0000\u0000\u0265\u0266\u0005"+ - "=\u0000\u0000\u0266\u0267\u0005=\u0000\u0000\u0267}\u0001\u0000\u0000"+ - "\u0000\u0268\u0269\u0005!\u0000\u0000\u0269\u026a\u0005=\u0000\u0000\u026a"+ - "\u007f\u0001\u0000\u0000\u0000\u026b\u026c\u0005<\u0000\u0000\u026c\u0081"+ - "\u0001\u0000\u0000\u0000\u026d\u026e\u0005<\u0000\u0000\u026e\u026f\u0005"+ - "=\u0000\u0000\u026f\u0083\u0001\u0000\u0000\u0000\u0270\u0271\u0005>\u0000"+ - "\u0000\u0271\u0085\u0001\u0000\u0000\u0000\u0272\u0273\u0005>\u0000\u0000"+ - "\u0273\u0274\u0005=\u0000\u0000\u0274\u0087\u0001\u0000\u0000\u0000\u0275"+ - "\u0276\u0005+\u0000\u0000\u0276\u0089\u0001\u0000\u0000\u0000\u0277\u0278"+ - "\u0005-\u0000\u0000\u0278\u008b\u0001\u0000\u0000\u0000\u0279\u027a\u0005"+ - "*\u0000\u0000\u027a\u008d\u0001\u0000\u0000\u0000\u027b\u027c\u0005/\u0000"+ - "\u0000\u027c\u008f\u0001\u0000\u0000\u0000\u027d\u027e\u0005%\u0000\u0000"+ - "\u027e\u0091\u0001\u0000\u0000\u0000\u027f\u0280\u0005[\u0000\u0000\u0280"+ - 
"\u0281\u0001\u0000\u0000\u0000\u0281\u0282\u0006G\u0000\u0000\u0282\u0283"+ - "\u0006G\u0000\u0000\u0283\u0093\u0001\u0000\u0000\u0000\u0284\u0285\u0005"+ - "]\u0000\u0000\u0285\u0286\u0001\u0000\u0000\u0000\u0286\u0287\u0006H\u0007"+ - "\u0000\u0287\u0288\u0006H\u0007\u0000\u0288\u0095\u0001\u0000\u0000\u0000"+ - "\u0289\u028f\u0003>\u001d\u0000\u028a\u028e\u0003>\u001d\u0000\u028b\u028e"+ - "\u0003<\u001c\u0000\u028c\u028e\u0005_\u0000\u0000\u028d\u028a\u0001\u0000"+ - "\u0000\u0000\u028d\u028b\u0001\u0000\u0000\u0000\u028d\u028c\u0001\u0000"+ - "\u0000\u0000\u028e\u0291\u0001\u0000\u0000\u0000\u028f\u028d\u0001\u0000"+ - "\u0000\u0000\u028f\u0290\u0001\u0000\u0000\u0000\u0290\u029b\u0001\u0000"+ - "\u0000\u0000\u0291\u028f\u0001\u0000\u0000\u0000\u0292\u0296\u0007\t\u0000"+ - "\u0000\u0293\u0297\u0003>\u001d\u0000\u0294\u0297\u0003<\u001c\u0000\u0295"+ - "\u0297\u0005_\u0000\u0000\u0296\u0293\u0001\u0000\u0000\u0000\u0296\u0294"+ - "\u0001\u0000\u0000\u0000\u0296\u0295\u0001\u0000\u0000\u0000\u0297\u0298"+ - "\u0001\u0000\u0000\u0000\u0298\u0296\u0001\u0000\u0000\u0000\u0298\u0299"+ - "\u0001\u0000\u0000\u0000\u0299\u029b\u0001\u0000\u0000\u0000\u029a\u0289"+ - "\u0001\u0000\u0000\u0000\u029a\u0292\u0001\u0000\u0000\u0000\u029b\u0097"+ - "\u0001\u0000\u0000\u0000\u029c\u02a2\u0005`\u0000\u0000\u029d\u02a1\b"+ - "\n\u0000\u0000\u029e\u029f\u0005`\u0000\u0000\u029f\u02a1\u0005`\u0000"+ - "\u0000\u02a0\u029d\u0001\u0000\u0000\u0000\u02a0\u029e\u0001\u0000\u0000"+ - "\u0000\u02a1\u02a4\u0001\u0000\u0000\u0000\u02a2\u02a0\u0001\u0000\u0000"+ - "\u0000\u02a2\u02a3\u0001\u0000\u0000\u0000\u02a3\u02a5\u0001\u0000\u0000"+ - "\u0000\u02a4\u02a2\u0001\u0000\u0000\u0000\u02a5\u02a6\u0005`\u0000\u0000"+ - "\u02a6\u0099\u0001\u0000\u0000\u0000\u02a7\u02a8\u0003*\u0013\u0000\u02a8"+ - "\u02a9\u0001\u0000\u0000\u0000\u02a9\u02aa\u0006K\u0003\u0000\u02aa\u009b"+ - "\u0001\u0000\u0000\u0000\u02ab\u02ac\u0003,\u0014\u0000\u02ac\u02ad\u0001"+ - "\u0000\u0000\u0000\u02ad\u02ae\u0006L\u0003\u0000\u02ae\u009d\u0001\u0000"+ - "\u0000\u0000\u02af\u02b0\u0003.\u0015\u0000\u02b0\u02b1\u0001\u0000\u0000"+ - "\u0000\u02b1\u02b2\u0006M\u0003\u0000\u02b2\u009f\u0001\u0000\u0000\u0000"+ - "\u02b3\u02b4\u0005|\u0000\u0000\u02b4\u02b5\u0001\u0000\u0000\u0000\u02b5"+ - "\u02b6\u0006N\u0006\u0000\u02b6\u02b7\u0006N\u0007\u0000\u02b7\u00a1\u0001"+ - "\u0000\u0000\u0000\u02b8\u02b9\u0005[\u0000\u0000\u02b9\u02ba\u0001\u0000"+ - "\u0000\u0000\u02ba\u02bb\u0006O\u0004\u0000\u02bb\u02bc\u0006O\u0001\u0000"+ - "\u02bc\u02bd\u0006O\u0001\u0000\u02bd\u00a3\u0001\u0000\u0000\u0000\u02be"+ - "\u02bf\u0005]\u0000\u0000\u02bf\u02c0\u0001\u0000\u0000\u0000\u02c0\u02c1"+ - "\u0006P\u0007\u0000\u02c1\u02c2\u0006P\u0007\u0000\u02c2\u02c3\u0006P"+ - "\b\u0000\u02c3\u00a5\u0001\u0000\u0000\u0000\u02c4\u02c5\u0005,\u0000"+ - "\u0000\u02c5\u02c6\u0001\u0000\u0000\u0000\u02c6\u02c7\u0006Q\t\u0000"+ - "\u02c7\u00a7\u0001\u0000\u0000\u0000\u02c8\u02c9\u0005=\u0000\u0000\u02c9"+ - "\u02ca\u0001\u0000\u0000\u0000\u02ca\u02cb\u0006R\n\u0000\u02cb\u00a9"+ - "\u0001\u0000\u0000\u0000\u02cc\u02cd\u0005a\u0000\u0000\u02cd\u02ce\u0005"+ - "s\u0000\u0000\u02ce\u00ab\u0001\u0000\u0000\u0000\u02cf\u02d0\u0005m\u0000"+ - "\u0000\u02d0\u02d1\u0005e\u0000\u0000\u02d1\u02d2\u0005t\u0000\u0000\u02d2"+ - "\u02d3\u0005a\u0000\u0000\u02d3\u02d4\u0005d\u0000\u0000\u02d4\u02d5\u0005"+ - "a\u0000\u0000\u02d5\u02d6\u0005t\u0000\u0000\u02d6\u02d7\u0005a\u0000"+ - "\u0000\u02d7\u00ad\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005o\u0000\u0000"+ - 
"\u02d9\u02da\u0005n\u0000\u0000\u02da\u00af\u0001\u0000\u0000\u0000\u02db"+ - "\u02dc\u0005w\u0000\u0000\u02dc\u02dd\u0005i\u0000\u0000\u02dd\u02de\u0005"+ - "t\u0000\u0000\u02de\u02df\u0005h\u0000\u0000\u02df\u00b1\u0001\u0000\u0000"+ - "\u0000\u02e0\u02e2\u0003\u00b4X\u0000\u02e1\u02e0\u0001\u0000\u0000\u0000"+ - "\u02e2\u02e3\u0001\u0000\u0000\u0000\u02e3\u02e1\u0001\u0000\u0000\u0000"+ - "\u02e3\u02e4\u0001\u0000\u0000\u0000\u02e4\u00b3\u0001\u0000\u0000\u0000"+ - "\u02e5\u02e7\b\u000b\u0000\u0000\u02e6\u02e5\u0001\u0000\u0000\u0000\u02e7"+ - "\u02e8\u0001\u0000\u0000\u0000\u02e8\u02e6\u0001\u0000\u0000\u0000\u02e8"+ - "\u02e9\u0001\u0000\u0000\u0000\u02e9\u02ed\u0001\u0000\u0000\u0000\u02ea"+ - "\u02eb\u0005/\u0000\u0000\u02eb\u02ed\b\f\u0000\u0000\u02ec\u02e6\u0001"+ - "\u0000\u0000\u0000\u02ec\u02ea\u0001\u0000\u0000\u0000\u02ed\u00b5\u0001"+ - "\u0000\u0000\u0000\u02ee\u02ef\u0003\u0098J\u0000\u02ef\u00b7\u0001\u0000"+ - "\u0000\u0000\u02f0\u02f1\u0003*\u0013\u0000\u02f1\u02f2\u0001\u0000\u0000"+ - "\u0000\u02f2\u02f3\u0006Z\u0003\u0000\u02f3\u00b9\u0001\u0000\u0000\u0000"+ - "\u02f4\u02f5\u0003,\u0014\u0000\u02f5\u02f6\u0001\u0000\u0000\u0000\u02f6"+ - "\u02f7\u0006[\u0003\u0000\u02f7\u00bb\u0001\u0000\u0000\u0000\u02f8\u02f9"+ - "\u0003.\u0015\u0000\u02f9\u02fa\u0001\u0000\u0000\u0000\u02fa\u02fb\u0006"+ - "\\\u0003\u0000\u02fb\u00bd\u0001\u0000\u0000\u0000&\u0000\u0001\u0002"+ - "\u0003\u015a\u0164\u0168\u016b\u0174\u0176\u0181\u01aa\u01af\u01b4\u01b6"+ - "\u01c1\u01c9\u01cc\u01ce\u01d3\u01d8\u01de\u01e5\u01ea\u01f0\u01f3\u01fb"+ - "\u01ff\u028d\u028f\u0296\u0298\u029a\u02a0\u02a2\u02e3\u02e8\u02ec\u000b"+ - "\u0005\u0002\u0000\u0005\u0003\u0000\u0005\u0001\u0000\u0000\u0001\u0000"+ - "\u0007A\u0000\u0005\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007"+ - "B\u0000\u0007\"\u0000\u0007!\u0000"; + "\u0001\u0010\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0011"+ + "\u0001\u0011\u0001\u0011\u0001\u0011\u0001\u0012\u0004\u0012\u01c3\b\u0012"+ + "\u000b\u0012\f\u0012\u01c4\u0001\u0012\u0001\u0012\u0001\u0013\u0001\u0013"+ + "\u0001\u0013\u0001\u0013\u0005\u0013\u01cd\b\u0013\n\u0013\f\u0013\u01d0"+ + "\t\u0013\u0001\u0013\u0003\u0013\u01d3\b\u0013\u0001\u0013\u0003\u0013"+ + "\u01d6\b\u0013\u0001\u0013\u0001\u0013\u0001\u0014\u0001\u0014\u0001\u0014"+ + "\u0001\u0014\u0001\u0014\u0005\u0014\u01df\b\u0014\n\u0014\f\u0014\u01e2"+ + "\t\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001\u0014\u0001"+ + "\u0015\u0004\u0015\u01ea\b\u0015\u000b\u0015\f\u0015\u01eb\u0001\u0015"+ + "\u0001\u0015\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016\u0001\u0016"+ + "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0018"+ + "\u0001\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001a\u0001\u001b"+ + "\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001c\u0001\u001c\u0001\u001d"+ + "\u0001\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f\u0001\u001f"+ + "\u0001 \u0001 \u0003 \u0215\b \u0001 \u0004 \u0218\b \u000b \f \u0219"+ + "\u0001!\u0001!\u0001\"\u0001\"\u0001#\u0001#\u0001#\u0003#\u0223\b#\u0001"+ + "$\u0001$\u0001%\u0001%\u0001%\u0003%\u022a\b%\u0001&\u0001&\u0001&\u0005"+ + "&\u022f\b&\n&\f&\u0232\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0001&\u0005"+ + "&\u023a\b&\n&\f&\u023d\t&\u0001&\u0001&\u0001&\u0001&\u0001&\u0003&\u0244"+ + "\b&\u0001&\u0003&\u0247\b&\u0003&\u0249\b&\u0001\'\u0004\'\u024c\b\'\u000b"+ + 
"\'\f\'\u024d\u0001(\u0004(\u0251\b(\u000b(\f(\u0252\u0001(\u0001(\u0005"+ + "(\u0257\b(\n(\f(\u025a\t(\u0001(\u0001(\u0004(\u025e\b(\u000b(\f(\u025f"+ + "\u0001(\u0004(\u0263\b(\u000b(\f(\u0264\u0001(\u0001(\u0005(\u0269\b("+ + "\n(\f(\u026c\t(\u0003(\u026e\b(\u0001(\u0001(\u0001(\u0001(\u0004(\u0274"+ + "\b(\u000b(\f(\u0275\u0001(\u0001(\u0003(\u027a\b(\u0001)\u0001)\u0001"+ + ")\u0001*\u0001*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001"+ + ",\u0001-\u0001-\u0001.\u0001.\u0001.\u0001.\u0001.\u0001/\u0001/\u0001"+ + "0\u00010\u00010\u00010\u00010\u00010\u00011\u00011\u00011\u00011\u0001"+ + "1\u00011\u00012\u00012\u00012\u00012\u00012\u00013\u00013\u00014\u0001"+ + "4\u00014\u00015\u00015\u00015\u00016\u00016\u00016\u00016\u00016\u0001"+ + "7\u00017\u00017\u00017\u00018\u00018\u00018\u00018\u00018\u00019\u0001"+ + "9\u00019\u00019\u00019\u00019\u0001:\u0001:\u0001:\u0001;\u0001;\u0001"+ + "<\u0001<\u0001<\u0001<\u0001<\u0001<\u0001=\u0001=\u0001>\u0001>\u0001"+ + ">\u0001>\u0001>\u0001?\u0001?\u0001?\u0001@\u0001@\u0001@\u0001A\u0001"+ + "A\u0001B\u0001B\u0001B\u0001C\u0001C\u0001D\u0001D\u0001D\u0001E\u0001"+ + "E\u0001F\u0001F\u0001G\u0001G\u0001H\u0001H\u0001I\u0001I\u0001J\u0001"+ + "J\u0001J\u0001J\u0001J\u0001K\u0001K\u0001K\u0001K\u0001K\u0001L\u0001"+ + "L\u0005L\u02f7\bL\nL\fL\u02fa\tL\u0001L\u0001L\u0003L\u02fe\bL\u0001L"+ + "\u0004L\u0301\bL\u000bL\fL\u0302\u0003L\u0305\bL\u0001M\u0001M\u0004M"+ + "\u0309\bM\u000bM\fM\u030a\u0001M\u0001M\u0001N\u0001N\u0001N\u0001N\u0001"+ + "O\u0001O\u0001O\u0001O\u0001P\u0001P\u0001P\u0001P\u0001Q\u0001Q\u0001"+ + "Q\u0001Q\u0001Q\u0001R\u0001R\u0001R\u0001R\u0001R\u0001R\u0001S\u0001"+ + "S\u0001S\u0001S\u0001S\u0001S\u0001T\u0001T\u0001T\u0001T\u0001U\u0001"+ + "U\u0001U\u0001U\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001V\u0001"+ + "V\u0001V\u0001W\u0001W\u0001W\u0003W\u0340\bW\u0001X\u0004X\u0343\bX\u000b"+ + "X\fX\u0344\u0001Y\u0001Y\u0001Y\u0001Y\u0001Z\u0001Z\u0001Z\u0001Z\u0001"+ + "[\u0001[\u0001[\u0001[\u0001\\\u0001\\\u0001\\\u0001\\\u0001]\u0001]\u0001"+ + "]\u0001]\u0001]\u0001^\u0001^\u0001^\u0001^\u0001_\u0001_\u0001_\u0001"+ + "_\u0001`\u0001`\u0001`\u0001`\u0003`\u0368\b`\u0001a\u0001a\u0003a\u036c"+ + "\ba\u0001a\u0005a\u036f\ba\na\fa\u0372\ta\u0001a\u0001a\u0003a\u0376\b"+ + "a\u0001a\u0004a\u0379\ba\u000ba\fa\u037a\u0003a\u037d\ba\u0001b\u0001"+ + "b\u0001b\u0001b\u0001c\u0001c\u0001c\u0001c\u0001d\u0001d\u0001d\u0001"+ + "d\u0001e\u0001e\u0001e\u0001e\u0001f\u0001f\u0001f\u0001f\u0001f\u0001"+ + "g\u0001g\u0001g\u0001g\u0001h\u0001h\u0001h\u0001h\u0001i\u0001i\u0001"+ + "i\u0001i\u0001j\u0001j\u0001j\u0001k\u0001k\u0001k\u0001k\u0001l\u0001"+ + "l\u0001l\u0001l\u0001m\u0001m\u0001m\u0001m\u0001n\u0001n\u0001n\u0001"+ + "n\u0001o\u0001o\u0001o\u0001o\u0001p\u0001p\u0001p\u0001p\u0001p\u0001"+ + "q\u0001q\u0001q\u0001q\u0001q\u0001r\u0001r\u0001r\u0001r\u0001r\u0001"+ + "r\u0001r\u0001s\u0001s\u0001s\u0001s\u0001t\u0001t\u0001t\u0001t\u0001"+ + "u\u0001u\u0001u\u0001u\u0001v\u0001v\u0001v\u0001v\u0001w\u0001w\u0001"+ + "w\u0001w\u0001x\u0001x\u0001x\u0001x\u0001x\u0001x\u0001y\u0001y\u0001"+ + "y\u0001y\u0001z\u0001z\u0001z\u0001z\u0001{\u0001{\u0001{\u0001{\u0001"+ + "|\u0001|\u0001|\u0001|\u0001}\u0001}\u0001}\u0001}\u0001~\u0001~\u0001"+ + "~\u0001~\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u007f\u0001\u0080\u0001"+ + "\u0080\u0001\u0080\u0001\u0080\u0001\u0081\u0001\u0081\u0001\u0081\u0001"+ + "\u0081\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001\u0082\u0001"+ + 
"\u0083\u0001\u0083\u0001\u0083\u0001\u0083\u0001\u0084\u0001\u0084\u0001"+ + "\u0084\u0001\u0084\u0001\u0085\u0001\u0085\u0001\u0085\u0001\u0085\u0001"+ + "\u0086\u0001\u0086\u0001\u0086\u0001\u0086\u0001\u0087\u0001\u0087\u0001"+ + "\u0087\u0001\u0087\u0001\u0088\u0001\u0088\u0001\u0088\u0001\u0088\u0001"+ + "\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u0089\u0001\u008a\u0001"+ + "\u008a\u0001\u008a\u0001\u008a\u0001\u008a\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001\u008b\u0001"+ + "\u008b\u0001\u008b\u0001\u008c\u0001\u008c\u0001\u008c\u0001\u008c\u0001"+ + "\u008d\u0001\u008d\u0001\u008d\u0001\u008d\u0001\u008e\u0001\u008e\u0001"+ + "\u008e\u0001\u008e\u0002\u01e0\u023b\u0000\u008f\n\u0001\f\u0002\u000e"+ + "\u0003\u0010\u0004\u0012\u0005\u0014\u0006\u0016\u0007\u0018\b\u001a\t"+ + "\u001c\n\u001e\u000b \f\"\r$\u000e&\u000f(\u0010*\u0011,\u0012.\u0013"+ + "0\u00142\u00154\u00166\u00008\u0000:\u0017<\u0018>\u0019@\u001aB\u0000"+ + "D\u0000F\u0000H\u0000J\u0000L\u0000N\u0000P\u0000R\u0000T\u0000V\u001b"+ + "X\u001cZ\u001d\\\u001e^\u001f` b!d\"f#h$j%l&n\'p(r)t*v+x,z-|.~/\u0080"+ + "0\u00821\u00842\u00863\u00884\u008a5\u008c6\u008e7\u00908\u00929\u0094"+ + ":\u0096;\u0098<\u009a=\u009c>\u009e?\u00a0@\u00a2A\u00a4B\u00a6C\u00a8"+ + "D\u00aaE\u00ac\u0000\u00ae\u0000\u00b0\u0000\u00b2\u0000\u00b4\u0000\u00b6"+ + "F\u00b8\u0000\u00baG\u00bc\u0000\u00beH\u00c0I\u00c2J\u00c4\u0000\u00c6"+ + "\u0000\u00c8\u0000\u00ca\u0000\u00ccK\u00ce\u0000\u00d0L\u00d2M\u00d4"+ + "N\u00d6\u0000\u00d8\u0000\u00da\u0000\u00dc\u0000\u00deO\u00e0\u0000\u00e2"+ + "\u0000\u00e4P\u00e6Q\u00e8R\u00ea\u0000\u00ecS\u00eeT\u00f0\u0000\u00f2"+ + "\u0000\u00f4U\u00f6V\u00f8W\u00fa\u0000\u00fc\u0000\u00fe\u0000\u0100"+ + "\u0000\u0102\u0000\u0104\u0000\u0106\u0000\u0108X\u010aY\u010cZ\u010e"+ + "\u0000\u0110\u0000\u0112\u0000\u0114\u0000\u0116[\u0118\\\u011a]\u011c"+ + "\u0000\u011e^\u0120_\u0122`\u0124a\u0126b\n\u0000\u0001\u0002\u0003\u0004"+ + "\u0005\u0006\u0007\b\t\f\u0006\u0000\t\n\r\r //[[]]\u0002\u0000\n\n\r"+ + "\r\u0003\u0000\t\n\r\r \u0001\u000009\u0002\u0000AZaz\u0005\u0000\"\""+ + "\\\\nnrrtt\u0004\u0000\n\n\r\r\"\"\\\\\u0002\u0000EEee\u0002\u0000++-"+ + "-\u0001\u0000``\n\u0000\t\n\r\r ,,//==[[]]``||\u0002\u0000**//\u0458"+ + "\u0000\n\u0001\u0000\u0000\u0000\u0000\f\u0001\u0000\u0000\u0000\u0000"+ + "\u000e\u0001\u0000\u0000\u0000\u0000\u0010\u0001\u0000\u0000\u0000\u0000"+ + "\u0012\u0001\u0000\u0000\u0000\u0000\u0014\u0001\u0000\u0000\u0000\u0000"+ + "\u0016\u0001\u0000\u0000\u0000\u0000\u0018\u0001\u0000\u0000\u0000\u0000"+ + "\u001a\u0001\u0000\u0000\u0000\u0000\u001c\u0001\u0000\u0000\u0000\u0000"+ + "\u001e\u0001\u0000\u0000\u0000\u0000 \u0001\u0000\u0000\u0000\u0000\""+ + "\u0001\u0000\u0000\u0000\u0000$\u0001\u0000\u0000\u0000\u0000&\u0001\u0000"+ + "\u0000\u0000\u0000(\u0001\u0000\u0000\u0000\u0000*\u0001\u0000\u0000\u0000"+ + "\u0000,\u0001\u0000\u0000\u0000\u0000.\u0001\u0000\u0000\u0000\u00000"+ + "\u0001\u0000\u0000\u0000\u00002\u0001\u0000\u0000\u0000\u00004\u0001\u0000"+ + "\u0000\u0000\u00016\u0001\u0000\u0000\u0000\u00018\u0001\u0000\u0000\u0000"+ + "\u0001:\u0001\u0000\u0000\u0000\u0001<\u0001\u0000\u0000\u0000\u0001>"+ + "\u0001\u0000\u0000\u0000\u0002@\u0001\u0000\u0000\u0000\u0002V\u0001\u0000"+ + "\u0000\u0000\u0002X\u0001\u0000\u0000\u0000\u0002Z\u0001\u0000\u0000\u0000"+ + "\u0002\\\u0001\u0000\u0000\u0000\u0002^\u0001\u0000\u0000\u0000\u0002"+ + "`\u0001\u0000\u0000\u0000\u0002b\u0001\u0000\u0000\u0000\u0002d\u0001"+ + 
"\u0000\u0000\u0000\u0002f\u0001\u0000\u0000\u0000\u0002h\u0001\u0000\u0000"+ + "\u0000\u0002j\u0001\u0000\u0000\u0000\u0002l\u0001\u0000\u0000\u0000\u0002"+ + "n\u0001\u0000\u0000\u0000\u0002p\u0001\u0000\u0000\u0000\u0002r\u0001"+ + "\u0000\u0000\u0000\u0002t\u0001\u0000\u0000\u0000\u0002v\u0001\u0000\u0000"+ + "\u0000\u0002x\u0001\u0000\u0000\u0000\u0002z\u0001\u0000\u0000\u0000\u0002"+ + "|\u0001\u0000\u0000\u0000\u0002~\u0001\u0000\u0000\u0000\u0002\u0080\u0001"+ + "\u0000\u0000\u0000\u0002\u0082\u0001\u0000\u0000\u0000\u0002\u0084\u0001"+ + "\u0000\u0000\u0000\u0002\u0086\u0001\u0000\u0000\u0000\u0002\u0088\u0001"+ + "\u0000\u0000\u0000\u0002\u008a\u0001\u0000\u0000\u0000\u0002\u008c\u0001"+ + "\u0000\u0000\u0000\u0002\u008e\u0001\u0000\u0000\u0000\u0002\u0090\u0001"+ + "\u0000\u0000\u0000\u0002\u0092\u0001\u0000\u0000\u0000\u0002\u0094\u0001"+ + "\u0000\u0000\u0000\u0002\u0096\u0001\u0000\u0000\u0000\u0002\u0098\u0001"+ + "\u0000\u0000\u0000\u0002\u009a\u0001\u0000\u0000\u0000\u0002\u009c\u0001"+ + "\u0000\u0000\u0000\u0002\u009e\u0001\u0000\u0000\u0000\u0002\u00a0\u0001"+ + "\u0000\u0000\u0000\u0002\u00a2\u0001\u0000\u0000\u0000\u0002\u00a4\u0001"+ + "\u0000\u0000\u0000\u0002\u00a6\u0001\u0000\u0000\u0000\u0002\u00a8\u0001"+ + "\u0000\u0000\u0000\u0002\u00aa\u0001\u0000\u0000\u0000\u0003\u00ac\u0001"+ + "\u0000\u0000\u0000\u0003\u00ae\u0001\u0000\u0000\u0000\u0003\u00b0\u0001"+ + "\u0000\u0000\u0000\u0003\u00b2\u0001\u0000\u0000\u0000\u0003\u00b4\u0001"+ + "\u0000\u0000\u0000\u0003\u00b6\u0001\u0000\u0000\u0000\u0003\u00ba\u0001"+ + "\u0000\u0000\u0000\u0003\u00bc\u0001\u0000\u0000\u0000\u0003\u00be\u0001"+ + "\u0000\u0000\u0000\u0003\u00c0\u0001\u0000\u0000\u0000\u0003\u00c2\u0001"+ + "\u0000\u0000\u0000\u0004\u00c4\u0001\u0000\u0000\u0000\u0004\u00c6\u0001"+ + "\u0000\u0000\u0000\u0004\u00c8\u0001\u0000\u0000\u0000\u0004\u00cc\u0001"+ + "\u0000\u0000\u0000\u0004\u00ce\u0001\u0000\u0000\u0000\u0004\u00d0\u0001"+ + "\u0000\u0000\u0000\u0004\u00d2\u0001\u0000\u0000\u0000\u0004\u00d4\u0001"+ + "\u0000\u0000\u0000\u0005\u00d6\u0001\u0000\u0000\u0000\u0005\u00d8\u0001"+ + "\u0000\u0000\u0000\u0005\u00da\u0001\u0000\u0000\u0000\u0005\u00dc\u0001"+ + "\u0000\u0000\u0000\u0005\u00de\u0001\u0000\u0000\u0000\u0005\u00e0\u0001"+ + "\u0000\u0000\u0000\u0005\u00e2\u0001\u0000\u0000\u0000\u0005\u00e4\u0001"+ + "\u0000\u0000\u0000\u0005\u00e6\u0001\u0000\u0000\u0000\u0005\u00e8\u0001"+ + "\u0000\u0000\u0000\u0006\u00ea\u0001\u0000\u0000\u0000\u0006\u00ec\u0001"+ + "\u0000\u0000\u0000\u0006\u00ee\u0001\u0000\u0000\u0000\u0006\u00f0\u0001"+ + "\u0000\u0000\u0000\u0006\u00f2\u0001\u0000\u0000\u0000\u0006\u00f4\u0001"+ + "\u0000\u0000\u0000\u0006\u00f6\u0001\u0000\u0000\u0000\u0006\u00f8\u0001"+ + "\u0000\u0000\u0000\u0007\u00fa\u0001\u0000\u0000\u0000\u0007\u00fc\u0001"+ + "\u0000\u0000\u0000\u0007\u00fe\u0001\u0000\u0000\u0000\u0007\u0100\u0001"+ + "\u0000\u0000\u0000\u0007\u0102\u0001\u0000\u0000\u0000\u0007\u0104\u0001"+ + "\u0000\u0000\u0000\u0007\u0106\u0001\u0000\u0000\u0000\u0007\u0108\u0001"+ + "\u0000\u0000\u0000\u0007\u010a\u0001\u0000\u0000\u0000\u0007\u010c\u0001"+ + "\u0000\u0000\u0000\b\u010e\u0001\u0000\u0000\u0000\b\u0110\u0001\u0000"+ + "\u0000\u0000\b\u0112\u0001\u0000\u0000\u0000\b\u0114\u0001\u0000\u0000"+ + "\u0000\b\u0116\u0001\u0000\u0000\u0000\b\u0118\u0001\u0000\u0000\u0000"+ + "\b\u011a\u0001\u0000\u0000\u0000\t\u011c\u0001\u0000\u0000\u0000\t\u011e"+ + "\u0001\u0000\u0000\u0000\t\u0120\u0001\u0000\u0000\u0000\t\u0122\u0001"+ + 
"\u0000\u0000\u0000\t\u0124\u0001\u0000\u0000\u0000\t\u0126\u0001\u0000"+ + "\u0000\u0000\n\u0128\u0001\u0000\u0000\u0000\f\u0132\u0001\u0000\u0000"+ + "\u0000\u000e\u0139\u0001\u0000\u0000\u0000\u0010\u0142\u0001\u0000\u0000"+ + "\u0000\u0012\u0149\u0001\u0000\u0000\u0000\u0014\u0153\u0001\u0000\u0000"+ + "\u0000\u0016\u015a\u0001\u0000\u0000\u0000\u0018\u0161\u0001\u0000\u0000"+ + "\u0000\u001a\u016f\u0001\u0000\u0000\u0000\u001c\u0176\u0001\u0000\u0000"+ + "\u0000\u001e\u017e\u0001\u0000\u0000\u0000 \u018a\u0001\u0000\u0000\u0000"+ + "\"\u0194\u0001\u0000\u0000\u0000$\u019d\u0001\u0000\u0000\u0000&\u01a3"+ + "\u0001\u0000\u0000\u0000(\u01aa\u0001\u0000\u0000\u0000*\u01b1\u0001\u0000"+ + "\u0000\u0000,\u01b9\u0001\u0000\u0000\u0000.\u01c2\u0001\u0000\u0000\u0000"+ + "0\u01c8\u0001\u0000\u0000\u00002\u01d9\u0001\u0000\u0000\u00004\u01e9"+ + "\u0001\u0000\u0000\u00006\u01ef\u0001\u0000\u0000\u00008\u01f4\u0001\u0000"+ + "\u0000\u0000:\u01f9\u0001\u0000\u0000\u0000<\u01fd\u0001\u0000\u0000\u0000"+ + ">\u0201\u0001\u0000\u0000\u0000@\u0205\u0001\u0000\u0000\u0000B\u0209"+ + "\u0001\u0000\u0000\u0000D\u020b\u0001\u0000\u0000\u0000F\u020d\u0001\u0000"+ + "\u0000\u0000H\u0210\u0001\u0000\u0000\u0000J\u0212\u0001\u0000\u0000\u0000"+ + "L\u021b\u0001\u0000\u0000\u0000N\u021d\u0001\u0000\u0000\u0000P\u0222"+ + "\u0001\u0000\u0000\u0000R\u0224\u0001\u0000\u0000\u0000T\u0229\u0001\u0000"+ + "\u0000\u0000V\u0248\u0001\u0000\u0000\u0000X\u024b\u0001\u0000\u0000\u0000"+ + "Z\u0279\u0001\u0000\u0000\u0000\\\u027b\u0001\u0000\u0000\u0000^\u027e"+ + "\u0001\u0000\u0000\u0000`\u0282\u0001\u0000\u0000\u0000b\u0286\u0001\u0000"+ + "\u0000\u0000d\u0288\u0001\u0000\u0000\u0000f\u028a\u0001\u0000\u0000\u0000"+ + "h\u028f\u0001\u0000\u0000\u0000j\u0291\u0001\u0000\u0000\u0000l\u0297"+ + "\u0001\u0000\u0000\u0000n\u029d\u0001\u0000\u0000\u0000p\u02a2\u0001\u0000"+ + "\u0000\u0000r\u02a4\u0001\u0000\u0000\u0000t\u02a7\u0001\u0000\u0000\u0000"+ + "v\u02aa\u0001\u0000\u0000\u0000x\u02af\u0001\u0000\u0000\u0000z\u02b3"+ + "\u0001\u0000\u0000\u0000|\u02b8\u0001\u0000\u0000\u0000~\u02be\u0001\u0000"+ + "\u0000\u0000\u0080\u02c1\u0001\u0000\u0000\u0000\u0082\u02c3\u0001\u0000"+ + "\u0000\u0000\u0084\u02c9\u0001\u0000\u0000\u0000\u0086\u02cb\u0001\u0000"+ + "\u0000\u0000\u0088\u02d0\u0001\u0000\u0000\u0000\u008a\u02d3\u0001\u0000"+ + "\u0000\u0000\u008c\u02d6\u0001\u0000\u0000\u0000\u008e\u02d8\u0001\u0000"+ + "\u0000\u0000\u0090\u02db\u0001\u0000\u0000\u0000\u0092\u02dd\u0001\u0000"+ + "\u0000\u0000\u0094\u02e0\u0001\u0000\u0000\u0000\u0096\u02e2\u0001\u0000"+ + "\u0000\u0000\u0098\u02e4\u0001\u0000\u0000\u0000\u009a\u02e6\u0001\u0000"+ + "\u0000\u0000\u009c\u02e8\u0001\u0000\u0000\u0000\u009e\u02ea\u0001\u0000"+ + "\u0000\u0000\u00a0\u02ef\u0001\u0000\u0000\u0000\u00a2\u0304\u0001\u0000"+ + "\u0000\u0000\u00a4\u0306\u0001\u0000\u0000\u0000\u00a6\u030e\u0001\u0000"+ + "\u0000\u0000\u00a8\u0312\u0001\u0000\u0000\u0000\u00aa\u0316\u0001\u0000"+ + "\u0000\u0000\u00ac\u031a\u0001\u0000\u0000\u0000\u00ae\u031f\u0001\u0000"+ + "\u0000\u0000\u00b0\u0325\u0001\u0000\u0000\u0000\u00b2\u032b\u0001\u0000"+ + "\u0000\u0000\u00b4\u032f\u0001\u0000\u0000\u0000\u00b6\u0333\u0001\u0000"+ + "\u0000\u0000\u00b8\u033f\u0001\u0000\u0000\u0000\u00ba\u0342\u0001\u0000"+ + "\u0000\u0000\u00bc\u0346\u0001\u0000\u0000\u0000\u00be\u034a\u0001\u0000"+ + "\u0000\u0000\u00c0\u034e\u0001\u0000\u0000\u0000\u00c2\u0352\u0001\u0000"+ + "\u0000\u0000\u00c4\u0356\u0001\u0000\u0000\u0000\u00c6\u035b\u0001\u0000"+ + 
"\u0000\u0000\u00c8\u035f\u0001\u0000\u0000\u0000\u00ca\u0367\u0001\u0000"+ + "\u0000\u0000\u00cc\u037c\u0001\u0000\u0000\u0000\u00ce\u037e\u0001\u0000"+ + "\u0000\u0000\u00d0\u0382\u0001\u0000\u0000\u0000\u00d2\u0386\u0001\u0000"+ + "\u0000\u0000\u00d4\u038a\u0001\u0000\u0000\u0000\u00d6\u038e\u0001\u0000"+ + "\u0000\u0000\u00d8\u0393\u0001\u0000\u0000\u0000\u00da\u0397\u0001\u0000"+ + "\u0000\u0000\u00dc\u039b\u0001\u0000\u0000\u0000\u00de\u039f\u0001\u0000"+ + "\u0000\u0000\u00e0\u03a2\u0001\u0000\u0000\u0000\u00e2\u03a6\u0001\u0000"+ + "\u0000\u0000\u00e4\u03aa\u0001\u0000\u0000\u0000\u00e6\u03ae\u0001\u0000"+ + "\u0000\u0000\u00e8\u03b2\u0001\u0000\u0000\u0000\u00ea\u03b6\u0001\u0000"+ + "\u0000\u0000\u00ec\u03bb\u0001\u0000\u0000\u0000\u00ee\u03c0\u0001\u0000"+ + "\u0000\u0000\u00f0\u03c7\u0001\u0000\u0000\u0000\u00f2\u03cb\u0001\u0000"+ + "\u0000\u0000\u00f4\u03cf\u0001\u0000\u0000\u0000\u00f6\u03d3\u0001\u0000"+ + "\u0000\u0000\u00f8\u03d7\u0001\u0000\u0000\u0000\u00fa\u03db\u0001\u0000"+ + "\u0000\u0000\u00fc\u03e1\u0001\u0000\u0000\u0000\u00fe\u03e5\u0001\u0000"+ + "\u0000\u0000\u0100\u03e9\u0001\u0000\u0000\u0000\u0102\u03ed\u0001\u0000"+ + "\u0000\u0000\u0104\u03f1\u0001\u0000\u0000\u0000\u0106\u03f5\u0001\u0000"+ + "\u0000\u0000\u0108\u03f9\u0001\u0000\u0000\u0000\u010a\u03fd\u0001\u0000"+ + "\u0000\u0000\u010c\u0401\u0001\u0000\u0000\u0000\u010e\u0405\u0001\u0000"+ + "\u0000\u0000\u0110\u040a\u0001\u0000\u0000\u0000\u0112\u040e\u0001\u0000"+ + "\u0000\u0000\u0114\u0412\u0001\u0000\u0000\u0000\u0116\u0416\u0001\u0000"+ + "\u0000\u0000\u0118\u041a\u0001\u0000\u0000\u0000\u011a\u041e\u0001\u0000"+ + "\u0000\u0000\u011c\u0422\u0001\u0000\u0000\u0000\u011e\u0427\u0001\u0000"+ + "\u0000\u0000\u0120\u042c\u0001\u0000\u0000\u0000\u0122\u0436\u0001\u0000"+ + "\u0000\u0000\u0124\u043a\u0001\u0000\u0000\u0000\u0126\u043e\u0001\u0000"+ + "\u0000\u0000\u0128\u0129\u0005d\u0000\u0000\u0129\u012a\u0005i\u0000\u0000"+ + "\u012a\u012b\u0005s\u0000\u0000\u012b\u012c\u0005s\u0000\u0000\u012c\u012d"+ + "\u0005e\u0000\u0000\u012d\u012e\u0005c\u0000\u0000\u012e\u012f\u0005t"+ + "\u0000\u0000\u012f\u0130\u0001\u0000\u0000\u0000\u0130\u0131\u0006\u0000"+ + "\u0000\u0000\u0131\u000b\u0001\u0000\u0000\u0000\u0132\u0133\u0005d\u0000"+ + "\u0000\u0133\u0134\u0005r\u0000\u0000\u0134\u0135\u0005o\u0000\u0000\u0135"+ + "\u0136\u0005p\u0000\u0000\u0136\u0137\u0001\u0000\u0000\u0000\u0137\u0138"+ + "\u0006\u0001\u0001\u0000\u0138\r\u0001\u0000\u0000\u0000\u0139\u013a\u0005"+ + "e\u0000\u0000\u013a\u013b\u0005n\u0000\u0000\u013b\u013c\u0005r\u0000"+ + "\u0000\u013c\u013d\u0005i\u0000\u0000\u013d\u013e\u0005c\u0000\u0000\u013e"+ + "\u013f\u0005h\u0000\u0000\u013f\u0140\u0001\u0000\u0000\u0000\u0140\u0141"+ + "\u0006\u0002\u0002\u0000\u0141\u000f\u0001\u0000\u0000\u0000\u0142\u0143"+ + "\u0005e\u0000\u0000\u0143\u0144\u0005v\u0000\u0000\u0144\u0145\u0005a"+ + "\u0000\u0000\u0145\u0146\u0005l\u0000\u0000\u0146\u0147\u0001\u0000\u0000"+ + "\u0000\u0147\u0148\u0006\u0003\u0000\u0000\u0148\u0011\u0001\u0000\u0000"+ + "\u0000\u0149\u014a\u0005e\u0000\u0000\u014a\u014b\u0005x\u0000\u0000\u014b"+ + "\u014c\u0005p\u0000\u0000\u014c\u014d\u0005l\u0000\u0000\u014d\u014e\u0005"+ + "a\u0000\u0000\u014e\u014f\u0005i\u0000\u0000\u014f\u0150\u0005n\u0000"+ + "\u0000\u0150\u0151\u0001\u0000\u0000\u0000\u0151\u0152\u0006\u0004\u0003"+ + "\u0000\u0152\u0013\u0001\u0000\u0000\u0000\u0153\u0154\u0005f\u0000\u0000"+ + "\u0154\u0155\u0005r\u0000\u0000\u0155\u0156\u0005o\u0000\u0000\u0156\u0157"+ + 
"\u0005m\u0000\u0000\u0157\u0158\u0001\u0000\u0000\u0000\u0158\u0159\u0006"+ + "\u0005\u0004\u0000\u0159\u0015\u0001\u0000\u0000\u0000\u015a\u015b\u0005"+ + "g\u0000\u0000\u015b\u015c\u0005r\u0000\u0000\u015c\u015d\u0005o\u0000"+ + "\u0000\u015d\u015e\u0005k\u0000\u0000\u015e\u015f\u0001\u0000\u0000\u0000"+ + "\u015f\u0160\u0006\u0006\u0000\u0000\u0160\u0017\u0001\u0000\u0000\u0000"+ + "\u0161\u0162\u0005i\u0000\u0000\u0162\u0163\u0005n\u0000\u0000\u0163\u0164"+ + "\u0005l\u0000\u0000\u0164\u0165\u0005i\u0000\u0000\u0165\u0166\u0005n"+ + "\u0000\u0000\u0166\u0167\u0005e\u0000\u0000\u0167\u0168\u0005s\u0000\u0000"+ + "\u0168\u0169\u0005t\u0000\u0000\u0169\u016a\u0005a\u0000\u0000\u016a\u016b"+ + "\u0005t\u0000\u0000\u016b\u016c\u0005s\u0000\u0000\u016c\u016d\u0001\u0000"+ + "\u0000\u0000\u016d\u016e\u0006\u0007\u0000\u0000\u016e\u0019\u0001\u0000"+ + "\u0000\u0000\u016f\u0170\u0005k\u0000\u0000\u0170\u0171\u0005e\u0000\u0000"+ + "\u0171\u0172\u0005e\u0000\u0000\u0172\u0173\u0005p\u0000\u0000\u0173\u0174"+ + "\u0001\u0000\u0000\u0000\u0174\u0175\u0006\b\u0001\u0000\u0175\u001b\u0001"+ + "\u0000\u0000\u0000\u0176\u0177\u0005l\u0000\u0000\u0177\u0178\u0005i\u0000"+ + "\u0000\u0178\u0179\u0005m\u0000\u0000\u0179\u017a\u0005i\u0000\u0000\u017a"+ + "\u017b\u0005t\u0000\u0000\u017b\u017c\u0001\u0000\u0000\u0000\u017c\u017d"+ + "\u0006\t\u0000\u0000\u017d\u001d\u0001\u0000\u0000\u0000\u017e\u017f\u0005"+ + "m\u0000\u0000\u017f\u0180\u0005v\u0000\u0000\u0180\u0181\u0005_\u0000"+ + "\u0000\u0181\u0182\u0005e\u0000\u0000\u0182\u0183\u0005x\u0000\u0000\u0183"+ + "\u0184\u0005p\u0000\u0000\u0184\u0185\u0005a\u0000\u0000\u0185\u0186\u0005"+ + "n\u0000\u0000\u0186\u0187\u0005d\u0000\u0000\u0187\u0188\u0001\u0000\u0000"+ + "\u0000\u0188\u0189\u0006\n\u0005\u0000\u0189\u001f\u0001\u0000\u0000\u0000"+ + "\u018a\u018b\u0005p\u0000\u0000\u018b\u018c\u0005r\u0000\u0000\u018c\u018d"+ + "\u0005o\u0000\u0000\u018d\u018e\u0005j\u0000\u0000\u018e\u018f\u0005e"+ + "\u0000\u0000\u018f\u0190\u0005c\u0000\u0000\u0190\u0191\u0005t\u0000\u0000"+ + "\u0191\u0192\u0001\u0000\u0000\u0000\u0192\u0193\u0006\u000b\u0001\u0000"+ + "\u0193!\u0001\u0000\u0000\u0000\u0194\u0195\u0005r\u0000\u0000\u0195\u0196"+ + "\u0005e\u0000\u0000\u0196\u0197\u0005n\u0000\u0000\u0197\u0198\u0005a"+ + "\u0000\u0000\u0198\u0199\u0005m\u0000\u0000\u0199\u019a\u0005e\u0000\u0000"+ + "\u019a\u019b\u0001\u0000\u0000\u0000\u019b\u019c\u0006\f\u0006\u0000\u019c"+ + "#\u0001\u0000\u0000\u0000\u019d\u019e\u0005r\u0000\u0000\u019e\u019f\u0005"+ + "o\u0000\u0000\u019f\u01a0\u0005w\u0000\u0000\u01a0\u01a1\u0001\u0000\u0000"+ + "\u0000\u01a1\u01a2\u0006\r\u0000\u0000\u01a2%\u0001\u0000\u0000\u0000"+ + "\u01a3\u01a4\u0005s\u0000\u0000\u01a4\u01a5\u0005h\u0000\u0000\u01a5\u01a6"+ + "\u0005o\u0000\u0000\u01a6\u01a7\u0005w\u0000\u0000\u01a7\u01a8\u0001\u0000"+ + "\u0000\u0000\u01a8\u01a9\u0006\u000e\u0007\u0000\u01a9\'\u0001\u0000\u0000"+ + "\u0000\u01aa\u01ab\u0005s\u0000\u0000\u01ab\u01ac\u0005o\u0000\u0000\u01ac"+ + "\u01ad\u0005r\u0000\u0000\u01ad\u01ae\u0005t\u0000\u0000\u01ae\u01af\u0001"+ + "\u0000\u0000\u0000\u01af\u01b0\u0006\u000f\u0000\u0000\u01b0)\u0001\u0000"+ + "\u0000\u0000\u01b1\u01b2\u0005s\u0000\u0000\u01b2\u01b3\u0005t\u0000\u0000"+ + "\u01b3\u01b4\u0005a\u0000\u0000\u01b4\u01b5\u0005t\u0000\u0000\u01b5\u01b6"+ + "\u0005s\u0000\u0000\u01b6\u01b7\u0001\u0000\u0000\u0000\u01b7\u01b8\u0006"+ + "\u0010\u0000\u0000\u01b8+\u0001\u0000\u0000\u0000\u01b9\u01ba\u0005w\u0000"+ + "\u0000\u01ba\u01bb\u0005h\u0000\u0000\u01bb\u01bc\u0005e\u0000\u0000\u01bc"+ 
+ "\u01bd\u0005r\u0000\u0000\u01bd\u01be\u0005e\u0000\u0000\u01be\u01bf\u0001"+ + "\u0000\u0000\u0000\u01bf\u01c0\u0006\u0011\u0000\u0000\u01c0-\u0001\u0000"+ + "\u0000\u0000\u01c1\u01c3\b\u0000\u0000\u0000\u01c2\u01c1\u0001\u0000\u0000"+ + "\u0000\u01c3\u01c4\u0001\u0000\u0000\u0000\u01c4\u01c2\u0001\u0000\u0000"+ + "\u0000\u01c4\u01c5\u0001\u0000\u0000\u0000\u01c5\u01c6\u0001\u0000\u0000"+ + "\u0000\u01c6\u01c7\u0006\u0012\u0000\u0000\u01c7/\u0001\u0000\u0000\u0000"+ + "\u01c8\u01c9\u0005/\u0000\u0000\u01c9\u01ca\u0005/\u0000\u0000\u01ca\u01ce"+ + "\u0001\u0000\u0000\u0000\u01cb\u01cd\b\u0001\u0000\u0000\u01cc\u01cb\u0001"+ + "\u0000\u0000\u0000\u01cd\u01d0\u0001\u0000\u0000\u0000\u01ce\u01cc\u0001"+ + "\u0000\u0000\u0000\u01ce\u01cf\u0001\u0000\u0000\u0000\u01cf\u01d2\u0001"+ + "\u0000\u0000\u0000\u01d0\u01ce\u0001\u0000\u0000\u0000\u01d1\u01d3\u0005"+ + "\r\u0000\u0000\u01d2\u01d1\u0001\u0000\u0000\u0000\u01d2\u01d3\u0001\u0000"+ + "\u0000\u0000\u01d3\u01d5\u0001\u0000\u0000\u0000\u01d4\u01d6\u0005\n\u0000"+ + "\u0000\u01d5\u01d4\u0001\u0000\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000"+ + "\u0000\u01d6\u01d7\u0001\u0000\u0000\u0000\u01d7\u01d8\u0006\u0013\b\u0000"+ + "\u01d81\u0001\u0000\u0000\u0000\u01d9\u01da\u0005/\u0000\u0000\u01da\u01db"+ + "\u0005*\u0000\u0000\u01db\u01e0\u0001\u0000\u0000\u0000\u01dc\u01df\u0003"+ + "2\u0014\u0000\u01dd\u01df\t\u0000\u0000\u0000\u01de\u01dc\u0001\u0000"+ + "\u0000\u0000\u01de\u01dd\u0001\u0000\u0000\u0000\u01df\u01e2\u0001\u0000"+ + "\u0000\u0000\u01e0\u01e1\u0001\u0000\u0000\u0000\u01e0\u01de\u0001\u0000"+ + "\u0000\u0000\u01e1\u01e3\u0001\u0000\u0000\u0000\u01e2\u01e0\u0001\u0000"+ + "\u0000\u0000\u01e3\u01e4\u0005*\u0000\u0000\u01e4\u01e5\u0005/\u0000\u0000"+ + "\u01e5\u01e6\u0001\u0000\u0000\u0000\u01e6\u01e7\u0006\u0014\b\u0000\u01e7"+ + "3\u0001\u0000\u0000\u0000\u01e8\u01ea\u0007\u0002\u0000\u0000\u01e9\u01e8"+ + "\u0001\u0000\u0000\u0000\u01ea\u01eb\u0001\u0000\u0000\u0000\u01eb\u01e9"+ + "\u0001\u0000\u0000\u0000\u01eb\u01ec\u0001\u0000\u0000\u0000\u01ec\u01ed"+ + "\u0001\u0000\u0000\u0000\u01ed\u01ee\u0006\u0015\b\u0000\u01ee5\u0001"+ + "\u0000\u0000\u0000\u01ef\u01f0\u0003\u009eJ\u0000\u01f0\u01f1\u0001\u0000"+ + "\u0000\u0000\u01f1\u01f2\u0006\u0016\t\u0000\u01f2\u01f3\u0006\u0016\n"+ + "\u0000\u01f37\u0001\u0000\u0000\u0000\u01f4\u01f5\u0003@\u001b\u0000\u01f5"+ + "\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0006\u0017\u000b\u0000\u01f7"+ + "\u01f8\u0006\u0017\f\u0000\u01f89\u0001\u0000\u0000\u0000\u01f9\u01fa"+ + "\u00034\u0015\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fc\u0006"+ + "\u0018\b\u0000\u01fc;\u0001\u0000\u0000\u0000\u01fd\u01fe\u00030\u0013"+ + "\u0000\u01fe\u01ff\u0001\u0000\u0000\u0000\u01ff\u0200\u0006\u0019\b\u0000"+ + "\u0200=\u0001\u0000\u0000\u0000\u0201\u0202\u00032\u0014\u0000\u0202\u0203"+ + "\u0001\u0000\u0000\u0000\u0203\u0204\u0006\u001a\b\u0000\u0204?\u0001"+ + "\u0000\u0000\u0000\u0205\u0206\u0005|\u0000\u0000\u0206\u0207\u0001\u0000"+ + "\u0000\u0000\u0207\u0208\u0006\u001b\f\u0000\u0208A\u0001\u0000\u0000"+ + "\u0000\u0209\u020a\u0007\u0003\u0000\u0000\u020aC\u0001\u0000\u0000\u0000"+ + "\u020b\u020c\u0007\u0004\u0000\u0000\u020cE\u0001\u0000\u0000\u0000\u020d"+ + "\u020e\u0005\\\u0000\u0000\u020e\u020f\u0007\u0005\u0000\u0000\u020fG"+ + "\u0001\u0000\u0000\u0000\u0210\u0211\b\u0006\u0000\u0000\u0211I\u0001"+ + "\u0000\u0000\u0000\u0212\u0214\u0007\u0007\u0000\u0000\u0213\u0215\u0007"+ + "\b\u0000\u0000\u0214\u0213\u0001\u0000\u0000\u0000\u0214\u0215\u0001\u0000"+ + 
"\u0000\u0000\u0215\u0217\u0001\u0000\u0000\u0000\u0216\u0218\u0003B\u001c"+ + "\u0000\u0217\u0216\u0001\u0000\u0000\u0000\u0218\u0219\u0001\u0000\u0000"+ + "\u0000\u0219\u0217\u0001\u0000\u0000\u0000\u0219\u021a\u0001\u0000\u0000"+ + "\u0000\u021aK\u0001\u0000\u0000\u0000\u021b\u021c\u0005@\u0000\u0000\u021c"+ + "M\u0001\u0000\u0000\u0000\u021d\u021e\u0005`\u0000\u0000\u021eO\u0001"+ + "\u0000\u0000\u0000\u021f\u0223\b\t\u0000\u0000\u0220\u0221\u0005`\u0000"+ + "\u0000\u0221\u0223\u0005`\u0000\u0000\u0222\u021f\u0001\u0000\u0000\u0000"+ + "\u0222\u0220\u0001\u0000\u0000\u0000\u0223Q\u0001\u0000\u0000\u0000\u0224"+ + "\u0225\u0005_\u0000\u0000\u0225S\u0001\u0000\u0000\u0000\u0226\u022a\u0003"+ + "D\u001d\u0000\u0227\u022a\u0003B\u001c\u0000\u0228\u022a\u0003R$\u0000"+ + "\u0229\u0226\u0001\u0000\u0000\u0000\u0229\u0227\u0001\u0000\u0000\u0000"+ + "\u0229\u0228\u0001\u0000\u0000\u0000\u022aU\u0001\u0000\u0000\u0000\u022b"+ + "\u0230\u0005\"\u0000\u0000\u022c\u022f\u0003F\u001e\u0000\u022d\u022f"+ + "\u0003H\u001f\u0000\u022e\u022c\u0001\u0000\u0000\u0000\u022e\u022d\u0001"+ + "\u0000\u0000\u0000\u022f\u0232\u0001\u0000\u0000\u0000\u0230\u022e\u0001"+ + "\u0000\u0000\u0000\u0230\u0231\u0001\u0000\u0000\u0000\u0231\u0233\u0001"+ + "\u0000\u0000\u0000\u0232\u0230\u0001\u0000\u0000\u0000\u0233\u0249\u0005"+ + "\"\u0000\u0000\u0234\u0235\u0005\"\u0000\u0000\u0235\u0236\u0005\"\u0000"+ + "\u0000\u0236\u0237\u0005\"\u0000\u0000\u0237\u023b\u0001\u0000\u0000\u0000"+ + "\u0238\u023a\b\u0001\u0000\u0000\u0239\u0238\u0001\u0000\u0000\u0000\u023a"+ + "\u023d\u0001\u0000\u0000\u0000\u023b\u023c\u0001\u0000\u0000\u0000\u023b"+ + "\u0239\u0001\u0000\u0000\u0000\u023c\u023e\u0001\u0000\u0000\u0000\u023d"+ + "\u023b\u0001\u0000\u0000\u0000\u023e\u023f\u0005\"\u0000\u0000\u023f\u0240"+ + "\u0005\"\u0000\u0000\u0240\u0241\u0005\"\u0000\u0000\u0241\u0243\u0001"+ + "\u0000\u0000\u0000\u0242\u0244\u0005\"\u0000\u0000\u0243\u0242\u0001\u0000"+ + "\u0000\u0000\u0243\u0244\u0001\u0000\u0000\u0000\u0244\u0246\u0001\u0000"+ + "\u0000\u0000\u0245\u0247\u0005\"\u0000\u0000\u0246\u0245\u0001\u0000\u0000"+ + "\u0000\u0246\u0247\u0001\u0000\u0000\u0000\u0247\u0249\u0001\u0000\u0000"+ + "\u0000\u0248\u022b\u0001\u0000\u0000\u0000\u0248\u0234\u0001\u0000\u0000"+ + "\u0000\u0249W\u0001\u0000\u0000\u0000\u024a\u024c\u0003B\u001c\u0000\u024b"+ + "\u024a\u0001\u0000\u0000\u0000\u024c\u024d\u0001\u0000\u0000\u0000\u024d"+ + "\u024b\u0001\u0000\u0000\u0000\u024d\u024e\u0001\u0000\u0000\u0000\u024e"+ + "Y\u0001\u0000\u0000\u0000\u024f\u0251\u0003B\u001c\u0000\u0250\u024f\u0001"+ + "\u0000\u0000\u0000\u0251\u0252\u0001\u0000\u0000\u0000\u0252\u0250\u0001"+ + "\u0000\u0000\u0000\u0252\u0253\u0001\u0000\u0000\u0000\u0253\u0254\u0001"+ + "\u0000\u0000\u0000\u0254\u0258\u0003h/\u0000\u0255\u0257\u0003B\u001c"+ + "\u0000\u0256\u0255\u0001\u0000\u0000\u0000\u0257\u025a\u0001\u0000\u0000"+ + "\u0000\u0258\u0256\u0001\u0000\u0000\u0000\u0258\u0259\u0001\u0000\u0000"+ + "\u0000\u0259\u027a\u0001\u0000\u0000\u0000\u025a\u0258\u0001\u0000\u0000"+ + "\u0000\u025b\u025d\u0003h/\u0000\u025c\u025e\u0003B\u001c\u0000\u025d"+ + "\u025c\u0001\u0000\u0000\u0000\u025e\u025f\u0001\u0000\u0000\u0000\u025f"+ + "\u025d\u0001\u0000\u0000\u0000\u025f\u0260\u0001\u0000\u0000\u0000\u0260"+ + "\u027a\u0001\u0000\u0000\u0000\u0261\u0263\u0003B\u001c\u0000\u0262\u0261"+ + "\u0001\u0000\u0000\u0000\u0263\u0264\u0001\u0000\u0000\u0000\u0264\u0262"+ + "\u0001\u0000\u0000\u0000\u0264\u0265\u0001\u0000\u0000\u0000\u0265\u026d"+ + 
"\u0001\u0000\u0000\u0000\u0266\u026a\u0003h/\u0000\u0267\u0269\u0003B"+ + "\u001c\u0000\u0268\u0267\u0001\u0000\u0000\u0000\u0269\u026c\u0001\u0000"+ + "\u0000\u0000\u026a\u0268\u0001\u0000\u0000\u0000\u026a\u026b\u0001\u0000"+ + "\u0000\u0000\u026b\u026e\u0001\u0000\u0000\u0000\u026c\u026a\u0001\u0000"+ + "\u0000\u0000\u026d\u0266\u0001\u0000\u0000\u0000\u026d\u026e\u0001\u0000"+ + "\u0000\u0000\u026e\u026f\u0001\u0000\u0000\u0000\u026f\u0270\u0003J \u0000"+ + "\u0270\u027a\u0001\u0000\u0000\u0000\u0271\u0273\u0003h/\u0000\u0272\u0274"+ + "\u0003B\u001c\u0000\u0273\u0272\u0001\u0000\u0000\u0000\u0274\u0275\u0001"+ + "\u0000\u0000\u0000\u0275\u0273\u0001\u0000\u0000\u0000\u0275\u0276\u0001"+ + "\u0000\u0000\u0000\u0276\u0277\u0001\u0000\u0000\u0000\u0277\u0278\u0003"+ + "J \u0000\u0278\u027a\u0001\u0000\u0000\u0000\u0279\u0250\u0001\u0000\u0000"+ + "\u0000\u0279\u025b\u0001\u0000\u0000\u0000\u0279\u0262\u0001\u0000\u0000"+ + "\u0000\u0279\u0271\u0001\u0000\u0000\u0000\u027a[\u0001\u0000\u0000\u0000"+ + "\u027b\u027c\u0005b\u0000\u0000\u027c\u027d\u0005y\u0000\u0000\u027d]"+ + "\u0001\u0000\u0000\u0000\u027e\u027f\u0005a\u0000\u0000\u027f\u0280\u0005"+ + "n\u0000\u0000\u0280\u0281\u0005d\u0000\u0000\u0281_\u0001\u0000\u0000"+ + "\u0000\u0282\u0283\u0005a\u0000\u0000\u0283\u0284\u0005s\u0000\u0000\u0284"+ + "\u0285\u0005c\u0000\u0000\u0285a\u0001\u0000\u0000\u0000\u0286\u0287\u0005"+ + "=\u0000\u0000\u0287c\u0001\u0000\u0000\u0000\u0288\u0289\u0005,\u0000"+ + "\u0000\u0289e\u0001\u0000\u0000\u0000\u028a\u028b\u0005d\u0000\u0000\u028b"+ + "\u028c\u0005e\u0000\u0000\u028c\u028d\u0005s\u0000\u0000\u028d\u028e\u0005"+ + "c\u0000\u0000\u028eg\u0001\u0000\u0000\u0000\u028f\u0290\u0005.\u0000"+ + "\u0000\u0290i\u0001\u0000\u0000\u0000\u0291\u0292\u0005f\u0000\u0000\u0292"+ + "\u0293\u0005a\u0000\u0000\u0293\u0294\u0005l\u0000\u0000\u0294\u0295\u0005"+ + "s\u0000\u0000\u0295\u0296\u0005e\u0000\u0000\u0296k\u0001\u0000\u0000"+ + "\u0000\u0297\u0298\u0005f\u0000\u0000\u0298\u0299\u0005i\u0000\u0000\u0299"+ + "\u029a\u0005r\u0000\u0000\u029a\u029b\u0005s\u0000\u0000\u029b\u029c\u0005"+ + "t\u0000\u0000\u029cm\u0001\u0000\u0000\u0000\u029d\u029e\u0005l\u0000"+ + "\u0000\u029e\u029f\u0005a\u0000\u0000\u029f\u02a0\u0005s\u0000\u0000\u02a0"+ + "\u02a1\u0005t\u0000\u0000\u02a1o\u0001\u0000\u0000\u0000\u02a2\u02a3\u0005"+ + "(\u0000\u0000\u02a3q\u0001\u0000\u0000\u0000\u02a4\u02a5\u0005i\u0000"+ + "\u0000\u02a5\u02a6\u0005n\u0000\u0000\u02a6s\u0001\u0000\u0000\u0000\u02a7"+ + "\u02a8\u0005i\u0000\u0000\u02a8\u02a9\u0005s\u0000\u0000\u02a9u\u0001"+ + "\u0000\u0000\u0000\u02aa\u02ab\u0005l\u0000\u0000\u02ab\u02ac\u0005i\u0000"+ + "\u0000\u02ac\u02ad\u0005k\u0000\u0000\u02ad\u02ae\u0005e\u0000\u0000\u02ae"+ + "w\u0001\u0000\u0000\u0000\u02af\u02b0\u0005n\u0000\u0000\u02b0\u02b1\u0005"+ + "o\u0000\u0000\u02b1\u02b2\u0005t\u0000\u0000\u02b2y\u0001\u0000\u0000"+ + "\u0000\u02b3\u02b4\u0005n\u0000\u0000\u02b4\u02b5\u0005u\u0000\u0000\u02b5"+ + "\u02b6\u0005l\u0000\u0000\u02b6\u02b7\u0005l\u0000\u0000\u02b7{\u0001"+ + "\u0000\u0000\u0000\u02b8\u02b9\u0005n\u0000\u0000\u02b9\u02ba\u0005u\u0000"+ + "\u0000\u02ba\u02bb\u0005l\u0000\u0000\u02bb\u02bc\u0005l\u0000\u0000\u02bc"+ + "\u02bd\u0005s\u0000\u0000\u02bd}\u0001\u0000\u0000\u0000\u02be\u02bf\u0005"+ + "o\u0000\u0000\u02bf\u02c0\u0005r\u0000\u0000\u02c0\u007f\u0001\u0000\u0000"+ + "\u0000\u02c1\u02c2\u0005?\u0000\u0000\u02c2\u0081\u0001\u0000\u0000\u0000"+ + "\u02c3\u02c4\u0005r\u0000\u0000\u02c4\u02c5\u0005l\u0000\u0000\u02c5\u02c6"+ + 
"\u0005i\u0000\u0000\u02c6\u02c7\u0005k\u0000\u0000\u02c7\u02c8\u0005e"+ + "\u0000\u0000\u02c8\u0083\u0001\u0000\u0000\u0000\u02c9\u02ca\u0005)\u0000"+ + "\u0000\u02ca\u0085\u0001\u0000\u0000\u0000\u02cb\u02cc\u0005t\u0000\u0000"+ + "\u02cc\u02cd\u0005r\u0000\u0000\u02cd\u02ce\u0005u\u0000\u0000\u02ce\u02cf"+ + "\u0005e\u0000\u0000\u02cf\u0087\u0001\u0000\u0000\u0000\u02d0\u02d1\u0005"+ + "=\u0000\u0000\u02d1\u02d2\u0005=\u0000\u0000\u02d2\u0089\u0001\u0000\u0000"+ + "\u0000\u02d3\u02d4\u0005!\u0000\u0000\u02d4\u02d5\u0005=\u0000\u0000\u02d5"+ + "\u008b\u0001\u0000\u0000\u0000\u02d6\u02d7\u0005<\u0000\u0000\u02d7\u008d"+ + "\u0001\u0000\u0000\u0000\u02d8\u02d9\u0005<\u0000\u0000\u02d9\u02da\u0005"+ + "=\u0000\u0000\u02da\u008f\u0001\u0000\u0000\u0000\u02db\u02dc\u0005>\u0000"+ + "\u0000\u02dc\u0091\u0001\u0000\u0000\u0000\u02dd\u02de\u0005>\u0000\u0000"+ + "\u02de\u02df\u0005=\u0000\u0000\u02df\u0093\u0001\u0000\u0000\u0000\u02e0"+ + "\u02e1\u0005+\u0000\u0000\u02e1\u0095\u0001\u0000\u0000\u0000\u02e2\u02e3"+ + "\u0005-\u0000\u0000\u02e3\u0097\u0001\u0000\u0000\u0000\u02e4\u02e5\u0005"+ + "*\u0000\u0000\u02e5\u0099\u0001\u0000\u0000\u0000\u02e6\u02e7\u0005/\u0000"+ + "\u0000\u02e7\u009b\u0001\u0000\u0000\u0000\u02e8\u02e9\u0005%\u0000\u0000"+ + "\u02e9\u009d\u0001\u0000\u0000\u0000\u02ea\u02eb\u0005[\u0000\u0000\u02eb"+ + "\u02ec\u0001\u0000\u0000\u0000\u02ec\u02ed\u0006J\u0000\u0000\u02ed\u02ee"+ + "\u0006J\u0000\u0000\u02ee\u009f\u0001\u0000\u0000\u0000\u02ef\u02f0\u0005"+ + "]\u0000\u0000\u02f0\u02f1\u0001\u0000\u0000\u0000\u02f1\u02f2\u0006K\f"+ + "\u0000\u02f2\u02f3\u0006K\f\u0000\u02f3\u00a1\u0001\u0000\u0000\u0000"+ + "\u02f4\u02f8\u0003D\u001d\u0000\u02f5\u02f7\u0003T%\u0000\u02f6\u02f5"+ + "\u0001\u0000\u0000\u0000\u02f7\u02fa\u0001\u0000\u0000\u0000\u02f8\u02f6"+ + "\u0001\u0000\u0000\u0000\u02f8\u02f9\u0001\u0000\u0000\u0000\u02f9\u0305"+ + "\u0001\u0000\u0000\u0000\u02fa\u02f8\u0001\u0000\u0000\u0000\u02fb\u02fe"+ + "\u0003R$\u0000\u02fc\u02fe\u0003L!\u0000\u02fd\u02fb\u0001\u0000\u0000"+ + "\u0000\u02fd\u02fc\u0001\u0000\u0000\u0000\u02fe\u0300\u0001\u0000\u0000"+ + "\u0000\u02ff\u0301\u0003T%\u0000\u0300\u02ff\u0001\u0000\u0000\u0000\u0301"+ + "\u0302\u0001\u0000\u0000\u0000\u0302\u0300\u0001\u0000\u0000\u0000\u0302"+ + "\u0303\u0001\u0000\u0000\u0000\u0303\u0305\u0001\u0000\u0000\u0000\u0304"+ + "\u02f4\u0001\u0000\u0000\u0000\u0304\u02fd\u0001\u0000\u0000\u0000\u0305"+ + "\u00a3\u0001\u0000\u0000\u0000\u0306\u0308\u0003N\"\u0000\u0307\u0309"+ + "\u0003P#\u0000\u0308\u0307\u0001\u0000\u0000\u0000\u0309\u030a\u0001\u0000"+ + "\u0000\u0000\u030a\u0308\u0001\u0000\u0000\u0000\u030a\u030b\u0001\u0000"+ + "\u0000\u0000\u030b\u030c\u0001\u0000\u0000\u0000\u030c\u030d\u0003N\""+ + "\u0000\u030d\u00a5\u0001\u0000\u0000\u0000\u030e\u030f\u00030\u0013\u0000"+ + "\u030f\u0310\u0001\u0000\u0000\u0000\u0310\u0311\u0006N\b\u0000\u0311"+ + "\u00a7\u0001\u0000\u0000\u0000\u0312\u0313\u00032\u0014\u0000\u0313\u0314"+ + "\u0001\u0000\u0000\u0000\u0314\u0315\u0006O\b\u0000\u0315\u00a9\u0001"+ + "\u0000\u0000\u0000\u0316\u0317\u00034\u0015\u0000\u0317\u0318\u0001\u0000"+ + "\u0000\u0000\u0318\u0319\u0006P\b\u0000\u0319\u00ab\u0001\u0000\u0000"+ + "\u0000\u031a\u031b\u0003@\u001b\u0000\u031b\u031c\u0001\u0000\u0000\u0000"+ + "\u031c\u031d\u0006Q\u000b\u0000\u031d\u031e\u0006Q\f\u0000\u031e\u00ad"+ + "\u0001\u0000\u0000\u0000\u031f\u0320\u0003\u009eJ\u0000\u0320\u0321\u0001"+ + "\u0000\u0000\u0000\u0321\u0322\u0006R\t\u0000\u0322\u0323\u0006R\u0004"+ + 
"\u0000\u0323\u0324\u0006R\u0004\u0000\u0324\u00af\u0001\u0000\u0000\u0000"+ + "\u0325\u0326\u0003\u00a0K\u0000\u0326\u0327\u0001\u0000\u0000\u0000\u0327"+ + "\u0328\u0006S\r\u0000\u0328\u0329\u0006S\f\u0000\u0329\u032a\u0006S\f"+ + "\u0000\u032a\u00b1\u0001\u0000\u0000\u0000\u032b\u032c\u0003d-\u0000\u032c"+ + "\u032d\u0001\u0000\u0000\u0000\u032d\u032e\u0006T\u000e\u0000\u032e\u00b3"+ + "\u0001\u0000\u0000\u0000\u032f\u0330\u0003b,\u0000\u0330\u0331\u0001\u0000"+ + "\u0000\u0000\u0331\u0332\u0006U\u000f\u0000\u0332\u00b5\u0001\u0000\u0000"+ + "\u0000\u0333\u0334\u0005m\u0000\u0000\u0334\u0335\u0005e\u0000\u0000\u0335"+ + "\u0336\u0005t\u0000\u0000\u0336\u0337\u0005a\u0000\u0000\u0337\u0338\u0005"+ + "d\u0000\u0000\u0338\u0339\u0005a\u0000\u0000\u0339\u033a\u0005t\u0000"+ + "\u0000\u033a\u033b\u0005a\u0000\u0000\u033b\u00b7\u0001\u0000\u0000\u0000"+ + "\u033c\u0340\b\n\u0000\u0000\u033d\u033e\u0005/\u0000\u0000\u033e\u0340"+ + "\b\u000b\u0000\u0000\u033f\u033c\u0001\u0000\u0000\u0000\u033f\u033d\u0001"+ + "\u0000\u0000\u0000\u0340\u00b9\u0001\u0000\u0000\u0000\u0341\u0343\u0003"+ + "\u00b8W\u0000\u0342\u0341\u0001\u0000\u0000\u0000\u0343\u0344\u0001\u0000"+ + "\u0000\u0000\u0344\u0342\u0001\u0000\u0000\u0000\u0344\u0345\u0001\u0000"+ + "\u0000\u0000\u0345\u00bb\u0001\u0000\u0000\u0000\u0346\u0347\u0003\u00a4"+ + "M\u0000\u0347\u0348\u0001\u0000\u0000\u0000\u0348\u0349\u0006Y\u0010\u0000"+ + "\u0349\u00bd\u0001\u0000\u0000\u0000\u034a\u034b\u00030\u0013\u0000\u034b"+ + "\u034c\u0001\u0000\u0000\u0000\u034c\u034d\u0006Z\b\u0000\u034d\u00bf"+ + "\u0001\u0000\u0000\u0000\u034e\u034f\u00032\u0014\u0000\u034f\u0350\u0001"+ + "\u0000\u0000\u0000\u0350\u0351\u0006[\b\u0000\u0351\u00c1\u0001\u0000"+ + "\u0000\u0000\u0352\u0353\u00034\u0015\u0000\u0353\u0354\u0001\u0000\u0000"+ + "\u0000\u0354\u0355\u0006\\\b\u0000\u0355\u00c3\u0001\u0000\u0000\u0000"+ + "\u0356\u0357\u0003@\u001b\u0000\u0357\u0358\u0001\u0000\u0000\u0000\u0358"+ + "\u0359\u0006]\u000b\u0000\u0359\u035a\u0006]\f\u0000\u035a\u00c5\u0001"+ + "\u0000\u0000\u0000\u035b\u035c\u0003h/\u0000\u035c\u035d\u0001\u0000\u0000"+ + "\u0000\u035d\u035e\u0006^\u0011\u0000\u035e\u00c7\u0001\u0000\u0000\u0000"+ + "\u035f\u0360\u0003d-\u0000\u0360\u0361\u0001\u0000\u0000\u0000\u0361\u0362"+ + "\u0006_\u000e\u0000\u0362\u00c9\u0001\u0000\u0000\u0000\u0363\u0368\u0003"+ + "D\u001d\u0000\u0364\u0368\u0003B\u001c\u0000\u0365\u0368\u0003R$\u0000"+ + "\u0366\u0368\u0003\u0098G\u0000\u0367\u0363\u0001\u0000\u0000\u0000\u0367"+ + "\u0364\u0001\u0000\u0000\u0000\u0367\u0365\u0001\u0000\u0000\u0000\u0367"+ + "\u0366\u0001\u0000\u0000\u0000\u0368\u00cb\u0001\u0000\u0000\u0000\u0369"+ + "\u036c\u0003D\u001d\u0000\u036a\u036c\u0003\u0098G\u0000\u036b\u0369\u0001"+ + "\u0000\u0000\u0000\u036b\u036a\u0001\u0000\u0000\u0000\u036c\u0370\u0001"+ + "\u0000\u0000\u0000\u036d\u036f\u0003\u00ca`\u0000\u036e\u036d\u0001\u0000"+ + "\u0000\u0000\u036f\u0372\u0001\u0000\u0000\u0000\u0370\u036e\u0001\u0000"+ + "\u0000\u0000\u0370\u0371\u0001\u0000\u0000\u0000\u0371\u037d\u0001\u0000"+ + "\u0000\u0000\u0372\u0370\u0001\u0000\u0000\u0000\u0373\u0376\u0003R$\u0000"+ + "\u0374\u0376\u0003L!\u0000\u0375\u0373\u0001\u0000\u0000\u0000\u0375\u0374"+ + "\u0001\u0000\u0000\u0000\u0376\u0378\u0001\u0000\u0000\u0000\u0377\u0379"+ + "\u0003\u00ca`\u0000\u0378\u0377\u0001\u0000\u0000\u0000\u0379\u037a\u0001"+ + "\u0000\u0000\u0000\u037a\u0378\u0001\u0000\u0000\u0000\u037a\u037b\u0001"+ + "\u0000\u0000\u0000\u037b\u037d\u0001\u0000\u0000\u0000\u037c\u036b\u0001"+ + 
"\u0000\u0000\u0000\u037c\u0375\u0001\u0000\u0000\u0000\u037d\u00cd\u0001"+ + "\u0000\u0000\u0000\u037e\u037f\u0003\u00a4M\u0000\u037f\u0380\u0001\u0000"+ + "\u0000\u0000\u0380\u0381\u0006b\u0010\u0000\u0381\u00cf\u0001\u0000\u0000"+ + "\u0000\u0382\u0383\u00030\u0013\u0000\u0383\u0384\u0001\u0000\u0000\u0000"+ + "\u0384\u0385\u0006c\b\u0000\u0385\u00d1\u0001\u0000\u0000\u0000\u0386"+ + "\u0387\u00032\u0014\u0000\u0387\u0388\u0001\u0000\u0000\u0000\u0388\u0389"+ + "\u0006d\b\u0000\u0389\u00d3\u0001\u0000\u0000\u0000\u038a\u038b\u0003"+ + "4\u0015\u0000\u038b\u038c\u0001\u0000\u0000\u0000\u038c\u038d\u0006e\b"+ + "\u0000\u038d\u00d5\u0001\u0000\u0000\u0000\u038e\u038f\u0003@\u001b\u0000"+ + "\u038f\u0390\u0001\u0000\u0000\u0000\u0390\u0391\u0006f\u000b\u0000\u0391"+ + "\u0392\u0006f\f\u0000\u0392\u00d7\u0001\u0000\u0000\u0000\u0393\u0394"+ + "\u0003b,\u0000\u0394\u0395\u0001\u0000\u0000\u0000\u0395\u0396\u0006g"+ + "\u000f\u0000\u0396\u00d9\u0001\u0000\u0000\u0000\u0397\u0398\u0003d-\u0000"+ + "\u0398\u0399\u0001\u0000\u0000\u0000\u0399\u039a\u0006h\u000e\u0000\u039a"+ + "\u00db\u0001\u0000\u0000\u0000\u039b\u039c\u0003h/\u0000\u039c\u039d\u0001"+ + "\u0000\u0000\u0000\u039d\u039e\u0006i\u0011\u0000\u039e\u00dd\u0001\u0000"+ + "\u0000\u0000\u039f\u03a0\u0005a\u0000\u0000\u03a0\u03a1\u0005s\u0000\u0000"+ + "\u03a1\u00df\u0001\u0000\u0000\u0000\u03a2\u03a3\u0003\u00a4M\u0000\u03a3"+ + "\u03a4\u0001\u0000\u0000\u0000\u03a4\u03a5\u0006k\u0010\u0000\u03a5\u00e1"+ + "\u0001\u0000\u0000\u0000\u03a6\u03a7\u0003\u00cca\u0000\u03a7\u03a8\u0001"+ + "\u0000\u0000\u0000\u03a8\u03a9\u0006l\u0012\u0000\u03a9\u00e3\u0001\u0000"+ + "\u0000\u0000\u03aa\u03ab\u00030\u0013\u0000\u03ab\u03ac\u0001\u0000\u0000"+ + "\u0000\u03ac\u03ad\u0006m\b\u0000\u03ad\u00e5\u0001\u0000\u0000\u0000"+ + "\u03ae\u03af\u00032\u0014\u0000\u03af\u03b0\u0001\u0000\u0000\u0000\u03b0"+ + "\u03b1\u0006n\b\u0000\u03b1\u00e7\u0001\u0000\u0000\u0000\u03b2\u03b3"+ + "\u00034\u0015\u0000\u03b3\u03b4\u0001\u0000\u0000\u0000\u03b4\u03b5\u0006"+ + "o\b\u0000\u03b5\u00e9\u0001\u0000\u0000\u0000\u03b6\u03b7\u0003@\u001b"+ + "\u0000\u03b7\u03b8\u0001\u0000\u0000\u0000\u03b8\u03b9\u0006p\u000b\u0000"+ + "\u03b9\u03ba\u0006p\f\u0000\u03ba\u00eb\u0001\u0000\u0000\u0000\u03bb"+ + "\u03bc\u0005o\u0000\u0000\u03bc\u03bd\u0005n\u0000\u0000\u03bd\u03be\u0001"+ + "\u0000\u0000\u0000\u03be\u03bf\u0006q\u0013\u0000\u03bf\u00ed\u0001\u0000"+ + "\u0000\u0000\u03c0\u03c1\u0005w\u0000\u0000\u03c1\u03c2\u0005i\u0000\u0000"+ + "\u03c2\u03c3\u0005t\u0000\u0000\u03c3\u03c4\u0005h\u0000\u0000\u03c4\u03c5"+ + "\u0001\u0000\u0000\u0000\u03c5\u03c6\u0006r\u0013\u0000\u03c6\u00ef\u0001"+ + "\u0000\u0000\u0000\u03c7\u03c8\u0003\u00baX\u0000\u03c8\u03c9\u0001\u0000"+ + "\u0000\u0000\u03c9\u03ca\u0006s\u0014\u0000\u03ca\u00f1\u0001\u0000\u0000"+ + "\u0000\u03cb\u03cc\u0003\u00a4M\u0000\u03cc\u03cd\u0001\u0000\u0000\u0000"+ + "\u03cd\u03ce\u0006t\u0010\u0000\u03ce\u00f3\u0001\u0000\u0000\u0000\u03cf"+ + "\u03d0\u00030\u0013\u0000\u03d0\u03d1\u0001\u0000\u0000\u0000\u03d1\u03d2"+ + "\u0006u\b\u0000\u03d2\u00f5\u0001\u0000\u0000\u0000\u03d3\u03d4\u0003"+ + "2\u0014\u0000\u03d4\u03d5\u0001\u0000\u0000\u0000\u03d5\u03d6\u0006v\b"+ + "\u0000\u03d6\u00f7\u0001\u0000\u0000\u0000\u03d7\u03d8\u00034\u0015\u0000"+ + "\u03d8\u03d9\u0001\u0000\u0000\u0000\u03d9\u03da\u0006w\b\u0000\u03da"+ + "\u00f9\u0001\u0000\u0000\u0000\u03db\u03dc\u0003@\u001b\u0000\u03dc\u03dd"+ + "\u0001\u0000\u0000\u0000\u03dd\u03de\u0006x\u000b\u0000\u03de\u03df\u0006"+ + 
"x\f\u0000\u03df\u03e0\u0006x\f\u0000\u03e0\u00fb\u0001\u0000\u0000\u0000"+ + "\u03e1\u03e2\u0003b,\u0000\u03e2\u03e3\u0001\u0000\u0000\u0000\u03e3\u03e4"+ + "\u0006y\u000f\u0000\u03e4\u00fd\u0001\u0000\u0000\u0000\u03e5\u03e6\u0003"+ + "d-\u0000\u03e6\u03e7\u0001\u0000\u0000\u0000\u03e7\u03e8\u0006z\u000e"+ + "\u0000\u03e8\u00ff\u0001\u0000\u0000\u0000\u03e9\u03ea\u0003h/\u0000\u03ea"+ + "\u03eb\u0001\u0000\u0000\u0000\u03eb\u03ec\u0006{\u0011\u0000\u03ec\u0101"+ + "\u0001\u0000\u0000\u0000\u03ed\u03ee\u0003\u00eer\u0000\u03ee\u03ef\u0001"+ + "\u0000\u0000\u0000\u03ef\u03f0\u0006|\u0015\u0000\u03f0\u0103\u0001\u0000"+ + "\u0000\u0000\u03f1\u03f2\u0003\u00cca\u0000\u03f2\u03f3\u0001\u0000\u0000"+ + "\u0000\u03f3\u03f4\u0006}\u0012\u0000\u03f4\u0105\u0001\u0000\u0000\u0000"+ + "\u03f5\u03f6\u0003\u00a4M\u0000\u03f6\u03f7\u0001\u0000\u0000\u0000\u03f7"+ + "\u03f8\u0006~\u0010\u0000\u03f8\u0107\u0001\u0000\u0000\u0000\u03f9\u03fa"+ + "\u00030\u0013\u0000\u03fa\u03fb\u0001\u0000\u0000\u0000\u03fb\u03fc\u0006"+ + "\u007f\b\u0000\u03fc\u0109\u0001\u0000\u0000\u0000\u03fd\u03fe\u00032"+ + "\u0014\u0000\u03fe\u03ff\u0001\u0000\u0000\u0000\u03ff\u0400\u0006\u0080"+ + "\b\u0000\u0400\u010b\u0001\u0000\u0000\u0000\u0401\u0402\u00034\u0015"+ + "\u0000\u0402\u0403\u0001\u0000\u0000\u0000\u0403\u0404\u0006\u0081\b\u0000"+ + "\u0404\u010d\u0001\u0000\u0000\u0000\u0405\u0406\u0003@\u001b\u0000\u0406"+ + "\u0407\u0001\u0000\u0000\u0000\u0407\u0408\u0006\u0082\u000b\u0000\u0408"+ + "\u0409\u0006\u0082\f\u0000\u0409\u010f\u0001\u0000\u0000\u0000\u040a\u040b"+ + "\u0003h/\u0000\u040b\u040c\u0001\u0000\u0000\u0000\u040c\u040d\u0006\u0083"+ + "\u0011\u0000\u040d\u0111\u0001\u0000\u0000\u0000\u040e\u040f\u0003\u00a4"+ + "M\u0000\u040f\u0410\u0001\u0000\u0000\u0000\u0410\u0411\u0006\u0084\u0010"+ + "\u0000\u0411\u0113\u0001\u0000\u0000\u0000\u0412\u0413\u0003\u00a2L\u0000"+ + "\u0413\u0414\u0001\u0000\u0000\u0000\u0414\u0415\u0006\u0085\u0016\u0000"+ + "\u0415\u0115\u0001\u0000\u0000\u0000\u0416\u0417\u00030\u0013\u0000\u0417"+ + "\u0418\u0001\u0000\u0000\u0000\u0418\u0419\u0006\u0086\b\u0000\u0419\u0117"+ + "\u0001\u0000\u0000\u0000\u041a\u041b\u00032\u0014\u0000\u041b\u041c\u0001"+ + "\u0000\u0000\u0000\u041c\u041d\u0006\u0087\b\u0000\u041d\u0119\u0001\u0000"+ + "\u0000\u0000\u041e\u041f\u00034\u0015\u0000\u041f\u0420\u0001\u0000\u0000"+ + "\u0000\u0420\u0421\u0006\u0088\b\u0000\u0421\u011b\u0001\u0000\u0000\u0000"+ + "\u0422\u0423\u0003@\u001b\u0000\u0423\u0424\u0001\u0000\u0000\u0000\u0424"+ + "\u0425\u0006\u0089\u000b\u0000\u0425\u0426\u0006\u0089\f\u0000\u0426\u011d"+ + "\u0001\u0000\u0000\u0000\u0427\u0428\u0005i\u0000\u0000\u0428\u0429\u0005"+ + "n\u0000\u0000\u0429\u042a\u0005f\u0000\u0000\u042a\u042b\u0005o\u0000"+ + "\u0000\u042b\u011f\u0001\u0000\u0000\u0000\u042c\u042d\u0005f\u0000\u0000"+ + "\u042d\u042e\u0005u\u0000\u0000\u042e\u042f\u0005n\u0000\u0000\u042f\u0430"+ + "\u0005c\u0000\u0000\u0430\u0431\u0005t\u0000\u0000\u0431\u0432\u0005i"+ + "\u0000\u0000\u0432\u0433\u0005o\u0000\u0000\u0433\u0434\u0005n\u0000\u0000"+ + "\u0434\u0435\u0005s\u0000\u0000\u0435\u0121\u0001\u0000\u0000\u0000\u0436"+ + "\u0437\u00030\u0013\u0000\u0437\u0438\u0001\u0000\u0000\u0000\u0438\u0439"+ + "\u0006\u008c\b\u0000\u0439\u0123\u0001\u0000\u0000\u0000\u043a\u043b\u0003"+ + "2\u0014\u0000\u043b\u043c\u0001\u0000\u0000\u0000\u043c\u043d\u0006\u008d"+ + "\b\u0000\u043d\u0125\u0001\u0000\u0000\u0000\u043e\u043f\u00034\u0015"+ + "\u0000\u043f\u0440\u0001\u0000\u0000\u0000\u0440\u0441\u0006\u008e\b\u0000"+ + 
"\u0441\u0127\u0001\u0000\u0000\u00001\u0000\u0001\u0002\u0003\u0004\u0005"+ + "\u0006\u0007\b\t\u01c4\u01ce\u01d2\u01d5\u01de\u01e0\u01eb\u0214\u0219"+ + "\u0222\u0229\u022e\u0230\u023b\u0243\u0246\u0248\u024d\u0252\u0258\u025f"+ + "\u0264\u026a\u026d\u0275\u0279\u02f8\u02fd\u0302\u0304\u030a\u033f\u0344"+ + "\u0367\u036b\u0370\u0375\u037a\u037c\u0017\u0005\u0002\u0000\u0005\u0004"+ + "\u0000\u0005\u0006\u0000\u0005\u0001\u0000\u0005\u0003\u0000\u0005\b\u0000"+ + "\u0005\u0005\u0000\u0005\t\u0000\u0000\u0001\u0000\u0007?\u0000\u0005"+ + "\u0000\u0000\u0007\u001a\u0000\u0004\u0000\u0000\u0007@\u0000\u0007\""+ + "\u0000\u0007!\u0000\u0007B\u0000\u0007$\u0000\u0007K\u0000\u0005\u0007"+ + "\u0000\u0007G\u0000\u0007T\u0000\u0007A\u0000"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp index 658e09ca4b190..3acc73b1b592c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.interp @@ -25,15 +25,15 @@ null null null null -null +'|' null null null 'by' 'and' 'asc' -null -null +'=' +',' 'desc' '.' 'false' @@ -51,8 +51,6 @@ null 'rlike' ')' 'true' -'info' -'functions' '==' '!=' '<' @@ -71,8 +69,19 @@ null null null null -'as' 'metadata' +null +null +null +null +null +null +null +null +'as' +null +null +null 'on' 'with' null @@ -81,6 +90,14 @@ null null null null +null +null +null +'info' +'functions' +null +null +null token symbolic names: null @@ -135,8 +152,6 @@ PARAM RLIKE RP TRUE -INFO -FUNCTIONS EQ NEQ LT @@ -155,16 +170,35 @@ QUOTED_IDENTIFIER EXPR_LINE_COMMENT EXPR_MULTILINE_COMMENT EXPR_WS -AS METADATA +FROM_UNQUOTED_IDENTIFIER +FROM_LINE_COMMENT +FROM_MULTILINE_COMMENT +FROM_WS +PROJECT_UNQUOTED_IDENTIFIER +PROJECT_LINE_COMMENT +PROJECT_MULTILINE_COMMENT +PROJECT_WS +AS +RENAME_LINE_COMMENT +RENAME_MULTILINE_COMMENT +RENAME_WS ON WITH -SRC_UNQUOTED_IDENTIFIER -SRC_QUOTED_IDENTIFIER -SRC_LINE_COMMENT -SRC_MULTILINE_COMMENT -SRC_WS -EXPLAIN_PIPE +ENRICH_LINE_COMMENT +ENRICH_MULTILINE_COMMENT +ENRICH_WS +ENRICH_FIELD_LINE_COMMENT +ENRICH_FIELD_MULTILINE_COMMENT +ENRICH_FIELD_WS +MVEXPAND_LINE_COMMENT +MVEXPAND_MULTILINE_COMMENT +MVEXPAND_WS +INFO +FUNCTIONS +SHOW_LINE_COMMENT +SHOW_MULTILINE_COMMENT +SHOW_WS rule names: singleStatement @@ -187,9 +221,11 @@ evalCommand statsCommand inlinestatsCommand grouping -sourceIdentifier +fromIdentifier qualifiedName +qualifiedNamePattern identifier +identifierPattern constant limitCommand sortCommand @@ -217,4 +253,4 @@ enrichWithClause atn: -[4, 1, 81, 505, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 104, 8, 1, 10, 1, 12, 1, 
107, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 113, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 128, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 140, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 147, 8, 5, 10, 5, 12, 5, 150, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 157, 8, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 169, 8, 5, 10, 5, 12, 5, 172, 9, 5, 1, 6, 1, 6, 3, 6, 176, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 183, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 188, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 195, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 201, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 209, 8, 8, 10, 8, 12, 8, 212, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 221, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 229, 8, 10, 10, 10, 12, 10, 232, 9, 10, 3, 10, 234, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 244, 8, 12, 10, 12, 12, 12, 247, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 254, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 260, 8, 14, 10, 14, 12, 14, 263, 9, 14, 1, 14, 3, 14, 266, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 273, 8, 15, 10, 15, 12, 15, 276, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 285, 8, 17, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 295, 8, 18, 1, 19, 1, 19, 1, 19, 5, 19, 300, 8, 19, 10, 19, 12, 19, 303, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 310, 8, 21, 10, 21, 12, 21, 313, 9, 21, 1, 22, 1, 22, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 330, 8, 23, 10, 23, 12, 23, 333, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 341, 8, 23, 10, 23, 12, 23, 344, 9, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 1, 23, 5, 23, 352, 8, 23, 10, 23, 12, 23, 355, 9, 23, 1, 23, 1, 23, 3, 23, 359, 8, 23, 1, 24, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 368, 8, 25, 10, 25, 12, 25, 371, 9, 25, 1, 26, 1, 26, 3, 26, 375, 8, 26, 1, 26, 1, 26, 3, 26, 379, 8, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 385, 8, 27, 10, 27, 12, 27, 388, 9, 27, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 394, 8, 27, 10, 27, 12, 27, 397, 9, 27, 3, 27, 399, 8, 27, 1, 28, 1, 28, 1, 28, 1, 28, 5, 28, 405, 8, 28, 10, 28, 12, 28, 408, 9, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 414, 8, 29, 10, 29, 12, 29, 417, 9, 29, 1, 30, 1, 30, 1, 30, 1, 30, 1, 31, 1, 31, 1, 31, 1, 31, 3, 31, 427, 8, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 34, 1, 34, 1, 34, 5, 34, 439, 8, 34, 10, 34, 12, 34, 442, 9, 34, 1, 35, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 37, 1, 37, 3, 37, 452, 8, 37, 1, 38, 3, 38, 455, 8, 38, 1, 38, 1, 38, 1, 39, 3, 39, 460, 8, 39, 1, 39, 1, 39, 1, 40, 1, 40, 1, 41, 1, 41, 1, 42, 1, 42, 1, 42, 1, 43, 1, 43, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 44, 3, 44, 479, 8, 44, 1, 45, 1, 45, 1, 45, 1, 45, 3, 45, 485, 8, 45, 1, 45, 1, 45, 1, 45, 1, 45, 5, 45, 491, 8, 45, 10, 45, 12, 45, 494, 9, 45, 3, 45, 496, 8, 45, 1, 46, 1, 46, 1, 46, 3, 46, 501, 8, 46, 1, 46, 1, 46, 1, 46, 0, 3, 2, 10, 16, 47, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 0, 8, 1, 0, 60, 61, 1, 0, 62, 64, 1, 0, 76, 77, 1, 0, 67, 68, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 54, 59, 535, 0, 94, 1, 0, 0, 0, 2, 97, 1, 0, 0, 0, 4, 112, 1, 0, 0, 0, 6, 127, 1, 0, 0, 0, 8, 129, 1, 0, 0, 0, 10, 160, 1, 0, 0, 0, 12, 187, 1, 0, 0, 0, 14, 194, 1, 0, 0, 0, 
16, 200, 1, 0, 0, 0, 18, 220, 1, 0, 0, 0, 20, 222, 1, 0, 0, 0, 22, 237, 1, 0, 0, 0, 24, 240, 1, 0, 0, 0, 26, 253, 1, 0, 0, 0, 28, 255, 1, 0, 0, 0, 30, 267, 1, 0, 0, 0, 32, 279, 1, 0, 0, 0, 34, 282, 1, 0, 0, 0, 36, 290, 1, 0, 0, 0, 38, 296, 1, 0, 0, 0, 40, 304, 1, 0, 0, 0, 42, 306, 1, 0, 0, 0, 44, 314, 1, 0, 0, 0, 46, 358, 1, 0, 0, 0, 48, 360, 1, 0, 0, 0, 50, 363, 1, 0, 0, 0, 52, 372, 1, 0, 0, 0, 54, 398, 1, 0, 0, 0, 56, 400, 1, 0, 0, 0, 58, 409, 1, 0, 0, 0, 60, 418, 1, 0, 0, 0, 62, 422, 1, 0, 0, 0, 64, 428, 1, 0, 0, 0, 66, 432, 1, 0, 0, 0, 68, 435, 1, 0, 0, 0, 70, 443, 1, 0, 0, 0, 72, 447, 1, 0, 0, 0, 74, 451, 1, 0, 0, 0, 76, 454, 1, 0, 0, 0, 78, 459, 1, 0, 0, 0, 80, 463, 1, 0, 0, 0, 82, 465, 1, 0, 0, 0, 84, 467, 1, 0, 0, 0, 86, 470, 1, 0, 0, 0, 88, 478, 1, 0, 0, 0, 90, 480, 1, 0, 0, 0, 92, 500, 1, 0, 0, 0, 94, 95, 3, 2, 1, 0, 95, 96, 5, 0, 0, 1, 96, 1, 1, 0, 0, 0, 97, 98, 6, 1, -1, 0, 98, 99, 3, 4, 2, 0, 99, 105, 1, 0, 0, 0, 100, 101, 10, 1, 0, 0, 101, 102, 5, 26, 0, 0, 102, 104, 3, 6, 3, 0, 103, 100, 1, 0, 0, 0, 104, 107, 1, 0, 0, 0, 105, 103, 1, 0, 0, 0, 105, 106, 1, 0, 0, 0, 106, 3, 1, 0, 0, 0, 107, 105, 1, 0, 0, 0, 108, 113, 3, 84, 42, 0, 109, 113, 3, 28, 14, 0, 110, 113, 3, 22, 11, 0, 111, 113, 3, 88, 44, 0, 112, 108, 1, 0, 0, 0, 112, 109, 1, 0, 0, 0, 112, 110, 1, 0, 0, 0, 112, 111, 1, 0, 0, 0, 113, 5, 1, 0, 0, 0, 114, 128, 3, 32, 16, 0, 115, 128, 3, 36, 18, 0, 116, 128, 3, 48, 24, 0, 117, 128, 3, 54, 27, 0, 118, 128, 3, 50, 25, 0, 119, 128, 3, 34, 17, 0, 120, 128, 3, 8, 4, 0, 121, 128, 3, 56, 28, 0, 122, 128, 3, 58, 29, 0, 123, 128, 3, 62, 31, 0, 124, 128, 3, 64, 32, 0, 125, 128, 3, 90, 45, 0, 126, 128, 3, 66, 33, 0, 127, 114, 1, 0, 0, 0, 127, 115, 1, 0, 0, 0, 127, 116, 1, 0, 0, 0, 127, 117, 1, 0, 0, 0, 127, 118, 1, 0, 0, 0, 127, 119, 1, 0, 0, 0, 127, 120, 1, 0, 0, 0, 127, 121, 1, 0, 0, 0, 127, 122, 1, 0, 0, 0, 127, 123, 1, 0, 0, 0, 127, 124, 1, 0, 0, 0, 127, 125, 1, 0, 0, 0, 127, 126, 1, 0, 0, 0, 128, 7, 1, 0, 0, 0, 129, 130, 5, 18, 0, 0, 130, 131, 3, 10, 5, 0, 131, 9, 1, 0, 0, 0, 132, 133, 6, 5, -1, 0, 133, 134, 5, 44, 0, 0, 134, 161, 3, 10, 5, 7, 135, 161, 3, 14, 7, 0, 136, 161, 3, 12, 6, 0, 137, 139, 3, 14, 7, 0, 138, 140, 5, 44, 0, 0, 139, 138, 1, 0, 0, 0, 139, 140, 1, 0, 0, 0, 140, 141, 1, 0, 0, 0, 141, 142, 5, 41, 0, 0, 142, 143, 5, 40, 0, 0, 143, 148, 3, 14, 7, 0, 144, 145, 5, 34, 0, 0, 145, 147, 3, 14, 7, 0, 146, 144, 1, 0, 0, 0, 147, 150, 1, 0, 0, 0, 148, 146, 1, 0, 0, 0, 148, 149, 1, 0, 0, 0, 149, 151, 1, 0, 0, 0, 150, 148, 1, 0, 0, 0, 151, 152, 5, 50, 0, 0, 152, 161, 1, 0, 0, 0, 153, 154, 3, 14, 7, 0, 154, 156, 5, 42, 0, 0, 155, 157, 5, 44, 0, 0, 156, 155, 1, 0, 0, 0, 156, 157, 1, 0, 0, 0, 157, 158, 1, 0, 0, 0, 158, 159, 5, 45, 0, 0, 159, 161, 1, 0, 0, 0, 160, 132, 1, 0, 0, 0, 160, 135, 1, 0, 0, 0, 160, 136, 1, 0, 0, 0, 160, 137, 1, 0, 0, 0, 160, 153, 1, 0, 0, 0, 161, 170, 1, 0, 0, 0, 162, 163, 10, 4, 0, 0, 163, 164, 5, 31, 0, 0, 164, 169, 3, 10, 5, 5, 165, 166, 10, 3, 0, 0, 166, 167, 5, 47, 0, 0, 167, 169, 3, 10, 5, 4, 168, 162, 1, 0, 0, 0, 168, 165, 1, 0, 0, 0, 169, 172, 1, 0, 0, 0, 170, 168, 1, 0, 0, 0, 170, 171, 1, 0, 0, 0, 171, 11, 1, 0, 0, 0, 172, 170, 1, 0, 0, 0, 173, 175, 3, 14, 7, 0, 174, 176, 5, 44, 0, 0, 175, 174, 1, 0, 0, 0, 175, 176, 1, 0, 0, 0, 176, 177, 1, 0, 0, 0, 177, 178, 5, 43, 0, 0, 178, 179, 3, 80, 40, 0, 179, 188, 1, 0, 0, 0, 180, 182, 3, 14, 7, 0, 181, 183, 5, 44, 0, 0, 182, 181, 1, 0, 0, 0, 182, 183, 1, 0, 0, 0, 183, 184, 1, 0, 0, 0, 184, 185, 5, 49, 0, 0, 185, 186, 3, 80, 40, 0, 186, 188, 1, 0, 0, 0, 187, 173, 1, 0, 0, 0, 187, 180, 1, 0, 0, 0, 
188, 13, 1, 0, 0, 0, 189, 195, 3, 16, 8, 0, 190, 191, 3, 16, 8, 0, 191, 192, 3, 82, 41, 0, 192, 193, 3, 16, 8, 0, 193, 195, 1, 0, 0, 0, 194, 189, 1, 0, 0, 0, 194, 190, 1, 0, 0, 0, 195, 15, 1, 0, 0, 0, 196, 197, 6, 8, -1, 0, 197, 201, 3, 18, 9, 0, 198, 199, 7, 0, 0, 0, 199, 201, 3, 16, 8, 3, 200, 196, 1, 0, 0, 0, 200, 198, 1, 0, 0, 0, 201, 210, 1, 0, 0, 0, 202, 203, 10, 2, 0, 0, 203, 204, 7, 1, 0, 0, 204, 209, 3, 16, 8, 3, 205, 206, 10, 1, 0, 0, 206, 207, 7, 0, 0, 0, 207, 209, 3, 16, 8, 2, 208, 202, 1, 0, 0, 0, 208, 205, 1, 0, 0, 0, 209, 212, 1, 0, 0, 0, 210, 208, 1, 0, 0, 0, 210, 211, 1, 0, 0, 0, 211, 17, 1, 0, 0, 0, 212, 210, 1, 0, 0, 0, 213, 221, 3, 46, 23, 0, 214, 221, 3, 42, 21, 0, 215, 221, 3, 20, 10, 0, 216, 217, 5, 40, 0, 0, 217, 218, 3, 10, 5, 0, 218, 219, 5, 50, 0, 0, 219, 221, 1, 0, 0, 0, 220, 213, 1, 0, 0, 0, 220, 214, 1, 0, 0, 0, 220, 215, 1, 0, 0, 0, 220, 216, 1, 0, 0, 0, 221, 19, 1, 0, 0, 0, 222, 223, 3, 44, 22, 0, 223, 233, 5, 40, 0, 0, 224, 234, 5, 62, 0, 0, 225, 230, 3, 10, 5, 0, 226, 227, 5, 34, 0, 0, 227, 229, 3, 10, 5, 0, 228, 226, 1, 0, 0, 0, 229, 232, 1, 0, 0, 0, 230, 228, 1, 0, 0, 0, 230, 231, 1, 0, 0, 0, 231, 234, 1, 0, 0, 0, 232, 230, 1, 0, 0, 0, 233, 224, 1, 0, 0, 0, 233, 225, 1, 0, 0, 0, 233, 234, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 236, 5, 50, 0, 0, 236, 21, 1, 0, 0, 0, 237, 238, 5, 14, 0, 0, 238, 239, 3, 24, 12, 0, 239, 23, 1, 0, 0, 0, 240, 245, 3, 26, 13, 0, 241, 242, 5, 34, 0, 0, 242, 244, 3, 26, 13, 0, 243, 241, 1, 0, 0, 0, 244, 247, 1, 0, 0, 0, 245, 243, 1, 0, 0, 0, 245, 246, 1, 0, 0, 0, 246, 25, 1, 0, 0, 0, 247, 245, 1, 0, 0, 0, 248, 254, 3, 10, 5, 0, 249, 250, 3, 42, 21, 0, 250, 251, 5, 33, 0, 0, 251, 252, 3, 10, 5, 0, 252, 254, 1, 0, 0, 0, 253, 248, 1, 0, 0, 0, 253, 249, 1, 0, 0, 0, 254, 27, 1, 0, 0, 0, 255, 256, 5, 6, 0, 0, 256, 261, 3, 40, 20, 0, 257, 258, 5, 34, 0, 0, 258, 260, 3, 40, 20, 0, 259, 257, 1, 0, 0, 0, 260, 263, 1, 0, 0, 0, 261, 259, 1, 0, 0, 0, 261, 262, 1, 0, 0, 0, 262, 265, 1, 0, 0, 0, 263, 261, 1, 0, 0, 0, 264, 266, 3, 30, 15, 0, 265, 264, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 29, 1, 0, 0, 0, 267, 268, 5, 65, 0, 0, 268, 269, 5, 73, 0, 0, 269, 274, 3, 40, 20, 0, 270, 271, 5, 34, 0, 0, 271, 273, 3, 40, 20, 0, 272, 270, 1, 0, 0, 0, 273, 276, 1, 0, 0, 0, 274, 272, 1, 0, 0, 0, 274, 275, 1, 0, 0, 0, 275, 277, 1, 0, 0, 0, 276, 274, 1, 0, 0, 0, 277, 278, 5, 66, 0, 0, 278, 31, 1, 0, 0, 0, 279, 280, 5, 4, 0, 0, 280, 281, 3, 24, 12, 0, 281, 33, 1, 0, 0, 0, 282, 284, 5, 17, 0, 0, 283, 285, 3, 24, 12, 0, 284, 283, 1, 0, 0, 0, 284, 285, 1, 0, 0, 0, 285, 288, 1, 0, 0, 0, 286, 287, 5, 30, 0, 0, 287, 289, 3, 38, 19, 0, 288, 286, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 35, 1, 0, 0, 0, 290, 291, 5, 8, 0, 0, 291, 294, 3, 24, 12, 0, 292, 293, 5, 30, 0, 0, 293, 295, 3, 38, 19, 0, 294, 292, 1, 0, 0, 0, 294, 295, 1, 0, 0, 0, 295, 37, 1, 0, 0, 0, 296, 301, 3, 42, 21, 0, 297, 298, 5, 34, 0, 0, 298, 300, 3, 42, 21, 0, 299, 297, 1, 0, 0, 0, 300, 303, 1, 0, 0, 0, 301, 299, 1, 0, 0, 0, 301, 302, 1, 0, 0, 0, 302, 39, 1, 0, 0, 0, 303, 301, 1, 0, 0, 0, 304, 305, 7, 2, 0, 0, 305, 41, 1, 0, 0, 0, 306, 311, 3, 44, 22, 0, 307, 308, 5, 36, 0, 0, 308, 310, 3, 44, 22, 0, 309, 307, 1, 0, 0, 0, 310, 313, 1, 0, 0, 0, 311, 309, 1, 0, 0, 0, 311, 312, 1, 0, 0, 0, 312, 43, 1, 0, 0, 0, 313, 311, 1, 0, 0, 0, 314, 315, 7, 3, 0, 0, 315, 45, 1, 0, 0, 0, 316, 359, 5, 45, 0, 0, 317, 318, 3, 78, 39, 0, 318, 319, 5, 67, 0, 0, 319, 359, 1, 0, 0, 0, 320, 359, 3, 76, 38, 0, 321, 359, 3, 78, 39, 0, 322, 359, 3, 72, 36, 0, 323, 359, 5, 48, 0, 0, 324, 359, 3, 80, 40, 0, 325, 326, 5, 65, 0, 0, 
326, 331, 3, 74, 37, 0, 327, 328, 5, 34, 0, 0, 328, 330, 3, 74, 37, 0, 329, 327, 1, 0, 0, 0, 330, 333, 1, 0, 0, 0, 331, 329, 1, 0, 0, 0, 331, 332, 1, 0, 0, 0, 332, 334, 1, 0, 0, 0, 333, 331, 1, 0, 0, 0, 334, 335, 5, 66, 0, 0, 335, 359, 1, 0, 0, 0, 336, 337, 5, 65, 0, 0, 337, 342, 3, 72, 36, 0, 338, 339, 5, 34, 0, 0, 339, 341, 3, 72, 36, 0, 340, 338, 1, 0, 0, 0, 341, 344, 1, 0, 0, 0, 342, 340, 1, 0, 0, 0, 342, 343, 1, 0, 0, 0, 343, 345, 1, 0, 0, 0, 344, 342, 1, 0, 0, 0, 345, 346, 5, 66, 0, 0, 346, 359, 1, 0, 0, 0, 347, 348, 5, 65, 0, 0, 348, 353, 3, 80, 40, 0, 349, 350, 5, 34, 0, 0, 350, 352, 3, 80, 40, 0, 351, 349, 1, 0, 0, 0, 352, 355, 1, 0, 0, 0, 353, 351, 1, 0, 0, 0, 353, 354, 1, 0, 0, 0, 354, 356, 1, 0, 0, 0, 355, 353, 1, 0, 0, 0, 356, 357, 5, 66, 0, 0, 357, 359, 1, 0, 0, 0, 358, 316, 1, 0, 0, 0, 358, 317, 1, 0, 0, 0, 358, 320, 1, 0, 0, 0, 358, 321, 1, 0, 0, 0, 358, 322, 1, 0, 0, 0, 358, 323, 1, 0, 0, 0, 358, 324, 1, 0, 0, 0, 358, 325, 1, 0, 0, 0, 358, 336, 1, 0, 0, 0, 358, 347, 1, 0, 0, 0, 359, 47, 1, 0, 0, 0, 360, 361, 5, 10, 0, 0, 361, 362, 5, 28, 0, 0, 362, 49, 1, 0, 0, 0, 363, 364, 5, 16, 0, 0, 364, 369, 3, 52, 26, 0, 365, 366, 5, 34, 0, 0, 366, 368, 3, 52, 26, 0, 367, 365, 1, 0, 0, 0, 368, 371, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 369, 370, 1, 0, 0, 0, 370, 51, 1, 0, 0, 0, 371, 369, 1, 0, 0, 0, 372, 374, 3, 10, 5, 0, 373, 375, 7, 4, 0, 0, 374, 373, 1, 0, 0, 0, 374, 375, 1, 0, 0, 0, 375, 378, 1, 0, 0, 0, 376, 377, 5, 46, 0, 0, 377, 379, 7, 5, 0, 0, 378, 376, 1, 0, 0, 0, 378, 379, 1, 0, 0, 0, 379, 53, 1, 0, 0, 0, 380, 381, 5, 9, 0, 0, 381, 386, 3, 40, 20, 0, 382, 383, 5, 34, 0, 0, 383, 385, 3, 40, 20, 0, 384, 382, 1, 0, 0, 0, 385, 388, 1, 0, 0, 0, 386, 384, 1, 0, 0, 0, 386, 387, 1, 0, 0, 0, 387, 399, 1, 0, 0, 0, 388, 386, 1, 0, 0, 0, 389, 390, 5, 12, 0, 0, 390, 395, 3, 40, 20, 0, 391, 392, 5, 34, 0, 0, 392, 394, 3, 40, 20, 0, 393, 391, 1, 0, 0, 0, 394, 397, 1, 0, 0, 0, 395, 393, 1, 0, 0, 0, 395, 396, 1, 0, 0, 0, 396, 399, 1, 0, 0, 0, 397, 395, 1, 0, 0, 0, 398, 380, 1, 0, 0, 0, 398, 389, 1, 0, 0, 0, 399, 55, 1, 0, 0, 0, 400, 401, 5, 2, 0, 0, 401, 406, 3, 40, 20, 0, 402, 403, 5, 34, 0, 0, 403, 405, 3, 40, 20, 0, 404, 402, 1, 0, 0, 0, 405, 408, 1, 0, 0, 0, 406, 404, 1, 0, 0, 0, 406, 407, 1, 0, 0, 0, 407, 57, 1, 0, 0, 0, 408, 406, 1, 0, 0, 0, 409, 410, 5, 13, 0, 0, 410, 415, 3, 60, 30, 0, 411, 412, 5, 34, 0, 0, 412, 414, 3, 60, 30, 0, 413, 411, 1, 0, 0, 0, 414, 417, 1, 0, 0, 0, 415, 413, 1, 0, 0, 0, 415, 416, 1, 0, 0, 0, 416, 59, 1, 0, 0, 0, 417, 415, 1, 0, 0, 0, 418, 419, 3, 40, 20, 0, 419, 420, 5, 72, 0, 0, 420, 421, 3, 40, 20, 0, 421, 61, 1, 0, 0, 0, 422, 423, 5, 1, 0, 0, 423, 424, 3, 18, 9, 0, 424, 426, 3, 80, 40, 0, 425, 427, 3, 68, 34, 0, 426, 425, 1, 0, 0, 0, 426, 427, 1, 0, 0, 0, 427, 63, 1, 0, 0, 0, 428, 429, 5, 7, 0, 0, 429, 430, 3, 18, 9, 0, 430, 431, 3, 80, 40, 0, 431, 65, 1, 0, 0, 0, 432, 433, 5, 11, 0, 0, 433, 434, 3, 40, 20, 0, 434, 67, 1, 0, 0, 0, 435, 440, 3, 70, 35, 0, 436, 437, 5, 34, 0, 0, 437, 439, 3, 70, 35, 0, 438, 436, 1, 0, 0, 0, 439, 442, 1, 0, 0, 0, 440, 438, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 69, 1, 0, 0, 0, 442, 440, 1, 0, 0, 0, 443, 444, 3, 44, 22, 0, 444, 445, 5, 33, 0, 0, 445, 446, 3, 46, 23, 0, 446, 71, 1, 0, 0, 0, 447, 448, 7, 6, 0, 0, 448, 73, 1, 0, 0, 0, 449, 452, 3, 76, 38, 0, 450, 452, 3, 78, 39, 0, 451, 449, 1, 0, 0, 0, 451, 450, 1, 0, 0, 0, 452, 75, 1, 0, 0, 0, 453, 455, 7, 0, 0, 0, 454, 453, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 456, 1, 0, 0, 0, 456, 457, 5, 29, 0, 0, 457, 77, 1, 0, 0, 0, 458, 460, 7, 0, 0, 0, 459, 458, 1, 0, 0, 0, 459, 460, 
1, 0, 0, 0, 460, 461, 1, 0, 0, 0, 461, 462, 5, 28, 0, 0, 462, 79, 1, 0, 0, 0, 463, 464, 5, 27, 0, 0, 464, 81, 1, 0, 0, 0, 465, 466, 7, 7, 0, 0, 466, 83, 1, 0, 0, 0, 467, 468, 5, 5, 0, 0, 468, 469, 3, 86, 43, 0, 469, 85, 1, 0, 0, 0, 470, 471, 5, 65, 0, 0, 471, 472, 3, 2, 1, 0, 472, 473, 5, 66, 0, 0, 473, 87, 1, 0, 0, 0, 474, 475, 5, 15, 0, 0, 475, 479, 5, 52, 0, 0, 476, 477, 5, 15, 0, 0, 477, 479, 5, 53, 0, 0, 478, 474, 1, 0, 0, 0, 478, 476, 1, 0, 0, 0, 479, 89, 1, 0, 0, 0, 480, 481, 5, 3, 0, 0, 481, 484, 3, 40, 20, 0, 482, 483, 5, 74, 0, 0, 483, 485, 3, 40, 20, 0, 484, 482, 1, 0, 0, 0, 484, 485, 1, 0, 0, 0, 485, 495, 1, 0, 0, 0, 486, 487, 5, 75, 0, 0, 487, 492, 3, 92, 46, 0, 488, 489, 5, 34, 0, 0, 489, 491, 3, 92, 46, 0, 490, 488, 1, 0, 0, 0, 491, 494, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 492, 493, 1, 0, 0, 0, 493, 496, 1, 0, 0, 0, 494, 492, 1, 0, 0, 0, 495, 486, 1, 0, 0, 0, 495, 496, 1, 0, 0, 0, 496, 91, 1, 0, 0, 0, 497, 498, 3, 40, 20, 0, 498, 499, 5, 33, 0, 0, 499, 501, 1, 0, 0, 0, 500, 497, 1, 0, 0, 0, 500, 501, 1, 0, 0, 0, 501, 502, 1, 0, 0, 0, 502, 503, 3, 40, 20, 0, 503, 93, 1, 0, 0, 0, 51, 105, 112, 127, 139, 148, 156, 160, 168, 170, 175, 182, 187, 194, 200, 208, 210, 220, 230, 233, 245, 253, 261, 265, 274, 284, 288, 294, 301, 311, 331, 342, 353, 358, 369, 374, 378, 386, 395, 398, 406, 415, 426, 440, 451, 454, 459, 478, 484, 492, 495, 500] \ No newline at end of file +[4, 1, 98, 519, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 2, 11, 7, 11, 2, 12, 7, 12, 2, 13, 7, 13, 2, 14, 7, 14, 2, 15, 7, 15, 2, 16, 7, 16, 2, 17, 7, 17, 2, 18, 7, 18, 2, 19, 7, 19, 2, 20, 7, 20, 2, 21, 7, 21, 2, 22, 7, 22, 2, 23, 7, 23, 2, 24, 7, 24, 2, 25, 7, 25, 2, 26, 7, 26, 2, 27, 7, 27, 2, 28, 7, 28, 2, 29, 7, 29, 2, 30, 7, 30, 2, 31, 7, 31, 2, 32, 7, 32, 2, 33, 7, 33, 2, 34, 7, 34, 2, 35, 7, 35, 2, 36, 7, 36, 2, 37, 7, 37, 2, 38, 7, 38, 2, 39, 7, 39, 2, 40, 7, 40, 2, 41, 7, 41, 2, 42, 7, 42, 2, 43, 7, 43, 2, 44, 7, 44, 2, 45, 7, 45, 2, 46, 7, 46, 2, 47, 7, 47, 2, 48, 7, 48, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 1, 108, 8, 1, 10, 1, 12, 1, 111, 9, 1, 1, 2, 1, 2, 1, 2, 1, 2, 3, 2, 117, 8, 2, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 1, 3, 3, 3, 132, 8, 3, 1, 4, 1, 4, 1, 4, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 144, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 151, 8, 5, 10, 5, 12, 5, 154, 9, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 3, 5, 161, 8, 5, 1, 5, 1, 5, 3, 5, 165, 8, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 1, 5, 5, 5, 173, 8, 5, 10, 5, 12, 5, 176, 9, 5, 1, 6, 1, 6, 3, 6, 180, 8, 6, 1, 6, 1, 6, 1, 6, 1, 6, 1, 6, 3, 6, 187, 8, 6, 1, 6, 1, 6, 1, 6, 3, 6, 192, 8, 6, 1, 7, 1, 7, 1, 7, 1, 7, 1, 7, 3, 7, 199, 8, 7, 1, 8, 1, 8, 1, 8, 1, 8, 3, 8, 205, 8, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 1, 8, 5, 8, 213, 8, 8, 10, 8, 12, 8, 216, 9, 8, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 1, 9, 3, 9, 225, 8, 9, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 1, 10, 5, 10, 233, 8, 10, 10, 10, 12, 10, 236, 9, 10, 3, 10, 238, 8, 10, 1, 10, 1, 10, 1, 11, 1, 11, 1, 11, 1, 12, 1, 12, 1, 12, 5, 12, 248, 8, 12, 10, 12, 12, 12, 251, 9, 12, 1, 13, 1, 13, 1, 13, 1, 13, 1, 13, 3, 13, 258, 8, 13, 1, 14, 1, 14, 1, 14, 1, 14, 5, 14, 264, 8, 14, 10, 14, 12, 14, 267, 9, 14, 1, 14, 3, 14, 270, 8, 14, 1, 15, 1, 15, 1, 15, 1, 15, 1, 15, 5, 15, 277, 8, 15, 10, 15, 12, 15, 280, 9, 15, 1, 15, 1, 15, 1, 16, 1, 16, 1, 16, 1, 17, 1, 17, 3, 17, 289, 8, 17, 1, 17, 1, 17, 3, 17, 293, 8, 17, 1, 18, 1, 18, 1, 18, 1, 18, 3, 18, 299, 8, 18, 1, 19, 1, 
19, 1, 19, 5, 19, 304, 8, 19, 10, 19, 12, 19, 307, 9, 19, 1, 20, 1, 20, 1, 21, 1, 21, 1, 21, 5, 21, 314, 8, 21, 10, 21, 12, 21, 317, 9, 21, 1, 22, 1, 22, 1, 22, 5, 22, 322, 8, 22, 10, 22, 12, 22, 325, 9, 22, 1, 23, 1, 23, 1, 24, 1, 24, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 344, 8, 25, 10, 25, 12, 25, 347, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 355, 8, 25, 10, 25, 12, 25, 358, 9, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 1, 25, 5, 25, 366, 8, 25, 10, 25, 12, 25, 369, 9, 25, 1, 25, 1, 25, 3, 25, 373, 8, 25, 1, 26, 1, 26, 1, 26, 1, 27, 1, 27, 1, 27, 1, 27, 5, 27, 382, 8, 27, 10, 27, 12, 27, 385, 9, 27, 1, 28, 1, 28, 3, 28, 389, 8, 28, 1, 28, 1, 28, 3, 28, 393, 8, 28, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 399, 8, 29, 10, 29, 12, 29, 402, 9, 29, 1, 29, 1, 29, 1, 29, 1, 29, 5, 29, 408, 8, 29, 10, 29, 12, 29, 411, 9, 29, 3, 29, 413, 8, 29, 1, 30, 1, 30, 1, 30, 1, 30, 5, 30, 419, 8, 30, 10, 30, 12, 30, 422, 9, 30, 1, 31, 1, 31, 1, 31, 1, 31, 5, 31, 428, 8, 31, 10, 31, 12, 31, 431, 9, 31, 1, 32, 1, 32, 1, 32, 1, 32, 1, 33, 1, 33, 1, 33, 1, 33, 3, 33, 441, 8, 33, 1, 34, 1, 34, 1, 34, 1, 34, 1, 35, 1, 35, 1, 35, 1, 36, 1, 36, 1, 36, 5, 36, 453, 8, 36, 10, 36, 12, 36, 456, 9, 36, 1, 37, 1, 37, 1, 37, 1, 37, 1, 38, 1, 38, 1, 39, 1, 39, 3, 39, 466, 8, 39, 1, 40, 3, 40, 469, 8, 40, 1, 40, 1, 40, 1, 41, 3, 41, 474, 8, 41, 1, 41, 1, 41, 1, 42, 1, 42, 1, 43, 1, 43, 1, 44, 1, 44, 1, 44, 1, 45, 1, 45, 1, 45, 1, 45, 1, 46, 1, 46, 1, 46, 1, 46, 3, 46, 493, 8, 46, 1, 47, 1, 47, 1, 47, 1, 47, 3, 47, 499, 8, 47, 1, 47, 1, 47, 1, 47, 1, 47, 5, 47, 505, 8, 47, 10, 47, 12, 47, 508, 9, 47, 3, 47, 510, 8, 47, 1, 48, 1, 48, 1, 48, 3, 48, 515, 8, 48, 1, 48, 1, 48, 1, 48, 0, 3, 2, 10, 16, 49, 0, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 34, 36, 38, 40, 42, 44, 46, 48, 50, 52, 54, 56, 58, 60, 62, 64, 66, 68, 70, 72, 74, 76, 78, 80, 82, 84, 86, 88, 90, 92, 94, 96, 0, 9, 1, 0, 58, 59, 1, 0, 60, 62, 2, 0, 66, 66, 71, 71, 1, 0, 65, 66, 2, 0, 66, 66, 75, 75, 2, 0, 32, 32, 35, 35, 1, 0, 38, 39, 2, 0, 37, 37, 51, 51, 1, 0, 52, 57, 548, 0, 98, 1, 0, 0, 0, 2, 101, 1, 0, 0, 0, 4, 116, 1, 0, 0, 0, 6, 131, 1, 0, 0, 0, 8, 133, 1, 0, 0, 0, 10, 164, 1, 0, 0, 0, 12, 191, 1, 0, 0, 0, 14, 198, 1, 0, 0, 0, 16, 204, 1, 0, 0, 0, 18, 224, 1, 0, 0, 0, 20, 226, 1, 0, 0, 0, 22, 241, 1, 0, 0, 0, 24, 244, 1, 0, 0, 0, 26, 257, 1, 0, 0, 0, 28, 259, 1, 0, 0, 0, 30, 271, 1, 0, 0, 0, 32, 283, 1, 0, 0, 0, 34, 286, 1, 0, 0, 0, 36, 294, 1, 0, 0, 0, 38, 300, 1, 0, 0, 0, 40, 308, 1, 0, 0, 0, 42, 310, 1, 0, 0, 0, 44, 318, 1, 0, 0, 0, 46, 326, 1, 0, 0, 0, 48, 328, 1, 0, 0, 0, 50, 372, 1, 0, 0, 0, 52, 374, 1, 0, 0, 0, 54, 377, 1, 0, 0, 0, 56, 386, 1, 0, 0, 0, 58, 412, 1, 0, 0, 0, 60, 414, 1, 0, 0, 0, 62, 423, 1, 0, 0, 0, 64, 432, 1, 0, 0, 0, 66, 436, 1, 0, 0, 0, 68, 442, 1, 0, 0, 0, 70, 446, 1, 0, 0, 0, 72, 449, 1, 0, 0, 0, 74, 457, 1, 0, 0, 0, 76, 461, 1, 0, 0, 0, 78, 465, 1, 0, 0, 0, 80, 468, 1, 0, 0, 0, 82, 473, 1, 0, 0, 0, 84, 477, 1, 0, 0, 0, 86, 479, 1, 0, 0, 0, 88, 481, 1, 0, 0, 0, 90, 484, 1, 0, 0, 0, 92, 492, 1, 0, 0, 0, 94, 494, 1, 0, 0, 0, 96, 514, 1, 0, 0, 0, 98, 99, 3, 2, 1, 0, 99, 100, 5, 0, 0, 1, 100, 1, 1, 0, 0, 0, 101, 102, 6, 1, -1, 0, 102, 103, 3, 4, 2, 0, 103, 109, 1, 0, 0, 0, 104, 105, 10, 1, 0, 0, 105, 106, 5, 26, 0, 0, 106, 108, 3, 6, 3, 0, 107, 104, 1, 0, 0, 0, 108, 111, 1, 0, 0, 0, 109, 107, 1, 0, 0, 0, 109, 110, 1, 0, 0, 0, 110, 3, 1, 0, 0, 0, 111, 109, 1, 0, 0, 0, 112, 117, 3, 88, 44, 0, 113, 117, 3, 28, 14, 0, 114, 117, 3, 22, 11, 0, 115, 117, 3, 
92, 46, 0, 116, 112, 1, 0, 0, 0, 116, 113, 1, 0, 0, 0, 116, 114, 1, 0, 0, 0, 116, 115, 1, 0, 0, 0, 117, 5, 1, 0, 0, 0, 118, 132, 3, 32, 16, 0, 119, 132, 3, 36, 18, 0, 120, 132, 3, 52, 26, 0, 121, 132, 3, 58, 29, 0, 122, 132, 3, 54, 27, 0, 123, 132, 3, 34, 17, 0, 124, 132, 3, 8, 4, 0, 125, 132, 3, 60, 30, 0, 126, 132, 3, 62, 31, 0, 127, 132, 3, 66, 33, 0, 128, 132, 3, 68, 34, 0, 129, 132, 3, 94, 47, 0, 130, 132, 3, 70, 35, 0, 131, 118, 1, 0, 0, 0, 131, 119, 1, 0, 0, 0, 131, 120, 1, 0, 0, 0, 131, 121, 1, 0, 0, 0, 131, 122, 1, 0, 0, 0, 131, 123, 1, 0, 0, 0, 131, 124, 1, 0, 0, 0, 131, 125, 1, 0, 0, 0, 131, 126, 1, 0, 0, 0, 131, 127, 1, 0, 0, 0, 131, 128, 1, 0, 0, 0, 131, 129, 1, 0, 0, 0, 131, 130, 1, 0, 0, 0, 132, 7, 1, 0, 0, 0, 133, 134, 5, 18, 0, 0, 134, 135, 3, 10, 5, 0, 135, 9, 1, 0, 0, 0, 136, 137, 6, 5, -1, 0, 137, 138, 5, 44, 0, 0, 138, 165, 3, 10, 5, 7, 139, 165, 3, 14, 7, 0, 140, 165, 3, 12, 6, 0, 141, 143, 3, 14, 7, 0, 142, 144, 5, 44, 0, 0, 143, 142, 1, 0, 0, 0, 143, 144, 1, 0, 0, 0, 144, 145, 1, 0, 0, 0, 145, 146, 5, 41, 0, 0, 146, 147, 5, 40, 0, 0, 147, 152, 3, 14, 7, 0, 148, 149, 5, 34, 0, 0, 149, 151, 3, 14, 7, 0, 150, 148, 1, 0, 0, 0, 151, 154, 1, 0, 0, 0, 152, 150, 1, 0, 0, 0, 152, 153, 1, 0, 0, 0, 153, 155, 1, 0, 0, 0, 154, 152, 1, 0, 0, 0, 155, 156, 5, 50, 0, 0, 156, 165, 1, 0, 0, 0, 157, 158, 3, 14, 7, 0, 158, 160, 5, 42, 0, 0, 159, 161, 5, 44, 0, 0, 160, 159, 1, 0, 0, 0, 160, 161, 1, 0, 0, 0, 161, 162, 1, 0, 0, 0, 162, 163, 5, 45, 0, 0, 163, 165, 1, 0, 0, 0, 164, 136, 1, 0, 0, 0, 164, 139, 1, 0, 0, 0, 164, 140, 1, 0, 0, 0, 164, 141, 1, 0, 0, 0, 164, 157, 1, 0, 0, 0, 165, 174, 1, 0, 0, 0, 166, 167, 10, 4, 0, 0, 167, 168, 5, 31, 0, 0, 168, 173, 3, 10, 5, 5, 169, 170, 10, 3, 0, 0, 170, 171, 5, 47, 0, 0, 171, 173, 3, 10, 5, 4, 172, 166, 1, 0, 0, 0, 172, 169, 1, 0, 0, 0, 173, 176, 1, 0, 0, 0, 174, 172, 1, 0, 0, 0, 174, 175, 1, 0, 0, 0, 175, 11, 1, 0, 0, 0, 176, 174, 1, 0, 0, 0, 177, 179, 3, 14, 7, 0, 178, 180, 5, 44, 0, 0, 179, 178, 1, 0, 0, 0, 179, 180, 1, 0, 0, 0, 180, 181, 1, 0, 0, 0, 181, 182, 5, 43, 0, 0, 182, 183, 3, 84, 42, 0, 183, 192, 1, 0, 0, 0, 184, 186, 3, 14, 7, 0, 185, 187, 5, 44, 0, 0, 186, 185, 1, 0, 0, 0, 186, 187, 1, 0, 0, 0, 187, 188, 1, 0, 0, 0, 188, 189, 5, 49, 0, 0, 189, 190, 3, 84, 42, 0, 190, 192, 1, 0, 0, 0, 191, 177, 1, 0, 0, 0, 191, 184, 1, 0, 0, 0, 192, 13, 1, 0, 0, 0, 193, 199, 3, 16, 8, 0, 194, 195, 3, 16, 8, 0, 195, 196, 3, 86, 43, 0, 196, 197, 3, 16, 8, 0, 197, 199, 1, 0, 0, 0, 198, 193, 1, 0, 0, 0, 198, 194, 1, 0, 0, 0, 199, 15, 1, 0, 0, 0, 200, 201, 6, 8, -1, 0, 201, 205, 3, 18, 9, 0, 202, 203, 7, 0, 0, 0, 203, 205, 3, 16, 8, 3, 204, 200, 1, 0, 0, 0, 204, 202, 1, 0, 0, 0, 205, 214, 1, 0, 0, 0, 206, 207, 10, 2, 0, 0, 207, 208, 7, 1, 0, 0, 208, 213, 3, 16, 8, 3, 209, 210, 10, 1, 0, 0, 210, 211, 7, 0, 0, 0, 211, 213, 3, 16, 8, 2, 212, 206, 1, 0, 0, 0, 212, 209, 1, 0, 0, 0, 213, 216, 1, 0, 0, 0, 214, 212, 1, 0, 0, 0, 214, 215, 1, 0, 0, 0, 215, 17, 1, 0, 0, 0, 216, 214, 1, 0, 0, 0, 217, 225, 3, 50, 25, 0, 218, 225, 3, 42, 21, 0, 219, 225, 3, 20, 10, 0, 220, 221, 5, 40, 0, 0, 221, 222, 3, 10, 5, 0, 222, 223, 5, 50, 0, 0, 223, 225, 1, 0, 0, 0, 224, 217, 1, 0, 0, 0, 224, 218, 1, 0, 0, 0, 224, 219, 1, 0, 0, 0, 224, 220, 1, 0, 0, 0, 225, 19, 1, 0, 0, 0, 226, 227, 3, 46, 23, 0, 227, 237, 5, 40, 0, 0, 228, 238, 5, 60, 0, 0, 229, 234, 3, 10, 5, 0, 230, 231, 5, 34, 0, 0, 231, 233, 3, 10, 5, 0, 232, 230, 1, 0, 0, 0, 233, 236, 1, 0, 0, 0, 234, 232, 1, 0, 0, 0, 234, 235, 1, 0, 0, 0, 235, 238, 1, 0, 0, 0, 236, 234, 1, 0, 0, 0, 237, 228, 1, 0, 0, 0, 237, 229, 1, 
0, 0, 0, 237, 238, 1, 0, 0, 0, 238, 239, 1, 0, 0, 0, 239, 240, 5, 50, 0, 0, 240, 21, 1, 0, 0, 0, 241, 242, 5, 14, 0, 0, 242, 243, 3, 24, 12, 0, 243, 23, 1, 0, 0, 0, 244, 249, 3, 26, 13, 0, 245, 246, 5, 34, 0, 0, 246, 248, 3, 26, 13, 0, 247, 245, 1, 0, 0, 0, 248, 251, 1, 0, 0, 0, 249, 247, 1, 0, 0, 0, 249, 250, 1, 0, 0, 0, 250, 25, 1, 0, 0, 0, 251, 249, 1, 0, 0, 0, 252, 258, 3, 10, 5, 0, 253, 254, 3, 42, 21, 0, 254, 255, 5, 33, 0, 0, 255, 256, 3, 10, 5, 0, 256, 258, 1, 0, 0, 0, 257, 252, 1, 0, 0, 0, 257, 253, 1, 0, 0, 0, 258, 27, 1, 0, 0, 0, 259, 260, 5, 6, 0, 0, 260, 265, 3, 40, 20, 0, 261, 262, 5, 34, 0, 0, 262, 264, 3, 40, 20, 0, 263, 261, 1, 0, 0, 0, 264, 267, 1, 0, 0, 0, 265, 263, 1, 0, 0, 0, 265, 266, 1, 0, 0, 0, 266, 269, 1, 0, 0, 0, 267, 265, 1, 0, 0, 0, 268, 270, 3, 30, 15, 0, 269, 268, 1, 0, 0, 0, 269, 270, 1, 0, 0, 0, 270, 29, 1, 0, 0, 0, 271, 272, 5, 63, 0, 0, 272, 273, 5, 70, 0, 0, 273, 278, 3, 40, 20, 0, 274, 275, 5, 34, 0, 0, 275, 277, 3, 40, 20, 0, 276, 274, 1, 0, 0, 0, 277, 280, 1, 0, 0, 0, 278, 276, 1, 0, 0, 0, 278, 279, 1, 0, 0, 0, 279, 281, 1, 0, 0, 0, 280, 278, 1, 0, 0, 0, 281, 282, 5, 64, 0, 0, 282, 31, 1, 0, 0, 0, 283, 284, 5, 4, 0, 0, 284, 285, 3, 24, 12, 0, 285, 33, 1, 0, 0, 0, 286, 288, 5, 17, 0, 0, 287, 289, 3, 24, 12, 0, 288, 287, 1, 0, 0, 0, 288, 289, 1, 0, 0, 0, 289, 292, 1, 0, 0, 0, 290, 291, 5, 30, 0, 0, 291, 293, 3, 38, 19, 0, 292, 290, 1, 0, 0, 0, 292, 293, 1, 0, 0, 0, 293, 35, 1, 0, 0, 0, 294, 295, 5, 8, 0, 0, 295, 298, 3, 24, 12, 0, 296, 297, 5, 30, 0, 0, 297, 299, 3, 38, 19, 0, 298, 296, 1, 0, 0, 0, 298, 299, 1, 0, 0, 0, 299, 37, 1, 0, 0, 0, 300, 305, 3, 42, 21, 0, 301, 302, 5, 34, 0, 0, 302, 304, 3, 42, 21, 0, 303, 301, 1, 0, 0, 0, 304, 307, 1, 0, 0, 0, 305, 303, 1, 0, 0, 0, 305, 306, 1, 0, 0, 0, 306, 39, 1, 0, 0, 0, 307, 305, 1, 0, 0, 0, 308, 309, 7, 2, 0, 0, 309, 41, 1, 0, 0, 0, 310, 315, 3, 46, 23, 0, 311, 312, 5, 36, 0, 0, 312, 314, 3, 46, 23, 0, 313, 311, 1, 0, 0, 0, 314, 317, 1, 0, 0, 0, 315, 313, 1, 0, 0, 0, 315, 316, 1, 0, 0, 0, 316, 43, 1, 0, 0, 0, 317, 315, 1, 0, 0, 0, 318, 323, 3, 48, 24, 0, 319, 320, 5, 36, 0, 0, 320, 322, 3, 48, 24, 0, 321, 319, 1, 0, 0, 0, 322, 325, 1, 0, 0, 0, 323, 321, 1, 0, 0, 0, 323, 324, 1, 0, 0, 0, 324, 45, 1, 0, 0, 0, 325, 323, 1, 0, 0, 0, 326, 327, 7, 3, 0, 0, 327, 47, 1, 0, 0, 0, 328, 329, 7, 4, 0, 0, 329, 49, 1, 0, 0, 0, 330, 373, 5, 45, 0, 0, 331, 332, 3, 82, 41, 0, 332, 333, 5, 65, 0, 0, 333, 373, 1, 0, 0, 0, 334, 373, 3, 80, 40, 0, 335, 373, 3, 82, 41, 0, 336, 373, 3, 76, 38, 0, 337, 373, 5, 48, 0, 0, 338, 373, 3, 84, 42, 0, 339, 340, 5, 63, 0, 0, 340, 345, 3, 78, 39, 0, 341, 342, 5, 34, 0, 0, 342, 344, 3, 78, 39, 0, 343, 341, 1, 0, 0, 0, 344, 347, 1, 0, 0, 0, 345, 343, 1, 0, 0, 0, 345, 346, 1, 0, 0, 0, 346, 348, 1, 0, 0, 0, 347, 345, 1, 0, 0, 0, 348, 349, 5, 64, 0, 0, 349, 373, 1, 0, 0, 0, 350, 351, 5, 63, 0, 0, 351, 356, 3, 76, 38, 0, 352, 353, 5, 34, 0, 0, 353, 355, 3, 76, 38, 0, 354, 352, 1, 0, 0, 0, 355, 358, 1, 0, 0, 0, 356, 354, 1, 0, 0, 0, 356, 357, 1, 0, 0, 0, 357, 359, 1, 0, 0, 0, 358, 356, 1, 0, 0, 0, 359, 360, 5, 64, 0, 0, 360, 373, 1, 0, 0, 0, 361, 362, 5, 63, 0, 0, 362, 367, 3, 84, 42, 0, 363, 364, 5, 34, 0, 0, 364, 366, 3, 84, 42, 0, 365, 363, 1, 0, 0, 0, 366, 369, 1, 0, 0, 0, 367, 365, 1, 0, 0, 0, 367, 368, 1, 0, 0, 0, 368, 370, 1, 0, 0, 0, 369, 367, 1, 0, 0, 0, 370, 371, 5, 64, 0, 0, 371, 373, 1, 0, 0, 0, 372, 330, 1, 0, 0, 0, 372, 331, 1, 0, 0, 0, 372, 334, 1, 0, 0, 0, 372, 335, 1, 0, 0, 0, 372, 336, 1, 0, 0, 0, 372, 337, 1, 0, 0, 0, 372, 338, 1, 0, 0, 0, 372, 339, 1, 0, 0, 0, 372, 350, 1, 0, 
0, 0, 372, 361, 1, 0, 0, 0, 373, 51, 1, 0, 0, 0, 374, 375, 5, 10, 0, 0, 375, 376, 5, 28, 0, 0, 376, 53, 1, 0, 0, 0, 377, 378, 5, 16, 0, 0, 378, 383, 3, 56, 28, 0, 379, 380, 5, 34, 0, 0, 380, 382, 3, 56, 28, 0, 381, 379, 1, 0, 0, 0, 382, 385, 1, 0, 0, 0, 383, 381, 1, 0, 0, 0, 383, 384, 1, 0, 0, 0, 384, 55, 1, 0, 0, 0, 385, 383, 1, 0, 0, 0, 386, 388, 3, 10, 5, 0, 387, 389, 7, 5, 0, 0, 388, 387, 1, 0, 0, 0, 388, 389, 1, 0, 0, 0, 389, 392, 1, 0, 0, 0, 390, 391, 5, 46, 0, 0, 391, 393, 7, 6, 0, 0, 392, 390, 1, 0, 0, 0, 392, 393, 1, 0, 0, 0, 393, 57, 1, 0, 0, 0, 394, 395, 5, 9, 0, 0, 395, 400, 3, 44, 22, 0, 396, 397, 5, 34, 0, 0, 397, 399, 3, 44, 22, 0, 398, 396, 1, 0, 0, 0, 399, 402, 1, 0, 0, 0, 400, 398, 1, 0, 0, 0, 400, 401, 1, 0, 0, 0, 401, 413, 1, 0, 0, 0, 402, 400, 1, 0, 0, 0, 403, 404, 5, 12, 0, 0, 404, 409, 3, 44, 22, 0, 405, 406, 5, 34, 0, 0, 406, 408, 3, 44, 22, 0, 407, 405, 1, 0, 0, 0, 408, 411, 1, 0, 0, 0, 409, 407, 1, 0, 0, 0, 409, 410, 1, 0, 0, 0, 410, 413, 1, 0, 0, 0, 411, 409, 1, 0, 0, 0, 412, 394, 1, 0, 0, 0, 412, 403, 1, 0, 0, 0, 413, 59, 1, 0, 0, 0, 414, 415, 5, 2, 0, 0, 415, 420, 3, 44, 22, 0, 416, 417, 5, 34, 0, 0, 417, 419, 3, 44, 22, 0, 418, 416, 1, 0, 0, 0, 419, 422, 1, 0, 0, 0, 420, 418, 1, 0, 0, 0, 420, 421, 1, 0, 0, 0, 421, 61, 1, 0, 0, 0, 422, 420, 1, 0, 0, 0, 423, 424, 5, 13, 0, 0, 424, 429, 3, 64, 32, 0, 425, 426, 5, 34, 0, 0, 426, 428, 3, 64, 32, 0, 427, 425, 1, 0, 0, 0, 428, 431, 1, 0, 0, 0, 429, 427, 1, 0, 0, 0, 429, 430, 1, 0, 0, 0, 430, 63, 1, 0, 0, 0, 431, 429, 1, 0, 0, 0, 432, 433, 3, 44, 22, 0, 433, 434, 5, 79, 0, 0, 434, 435, 3, 44, 22, 0, 435, 65, 1, 0, 0, 0, 436, 437, 5, 1, 0, 0, 437, 438, 3, 18, 9, 0, 438, 440, 3, 84, 42, 0, 439, 441, 3, 72, 36, 0, 440, 439, 1, 0, 0, 0, 440, 441, 1, 0, 0, 0, 441, 67, 1, 0, 0, 0, 442, 443, 5, 7, 0, 0, 443, 444, 3, 18, 9, 0, 444, 445, 3, 84, 42, 0, 445, 69, 1, 0, 0, 0, 446, 447, 5, 11, 0, 0, 447, 448, 3, 42, 21, 0, 448, 71, 1, 0, 0, 0, 449, 454, 3, 74, 37, 0, 450, 451, 5, 34, 0, 0, 451, 453, 3, 74, 37, 0, 452, 450, 1, 0, 0, 0, 453, 456, 1, 0, 0, 0, 454, 452, 1, 0, 0, 0, 454, 455, 1, 0, 0, 0, 455, 73, 1, 0, 0, 0, 456, 454, 1, 0, 0, 0, 457, 458, 3, 46, 23, 0, 458, 459, 5, 33, 0, 0, 459, 460, 3, 50, 25, 0, 460, 75, 1, 0, 0, 0, 461, 462, 7, 7, 0, 0, 462, 77, 1, 0, 0, 0, 463, 466, 3, 80, 40, 0, 464, 466, 3, 82, 41, 0, 465, 463, 1, 0, 0, 0, 465, 464, 1, 0, 0, 0, 466, 79, 1, 0, 0, 0, 467, 469, 7, 0, 0, 0, 468, 467, 1, 0, 0, 0, 468, 469, 1, 0, 0, 0, 469, 470, 1, 0, 0, 0, 470, 471, 5, 29, 0, 0, 471, 81, 1, 0, 0, 0, 472, 474, 7, 0, 0, 0, 473, 472, 1, 0, 0, 0, 473, 474, 1, 0, 0, 0, 474, 475, 1, 0, 0, 0, 475, 476, 5, 28, 0, 0, 476, 83, 1, 0, 0, 0, 477, 478, 5, 27, 0, 0, 478, 85, 1, 0, 0, 0, 479, 480, 7, 8, 0, 0, 480, 87, 1, 0, 0, 0, 481, 482, 5, 5, 0, 0, 482, 483, 3, 90, 45, 0, 483, 89, 1, 0, 0, 0, 484, 485, 5, 63, 0, 0, 485, 486, 3, 2, 1, 0, 486, 487, 5, 64, 0, 0, 487, 91, 1, 0, 0, 0, 488, 489, 5, 15, 0, 0, 489, 493, 5, 94, 0, 0, 490, 491, 5, 15, 0, 0, 491, 493, 5, 95, 0, 0, 492, 488, 1, 0, 0, 0, 492, 490, 1, 0, 0, 0, 493, 93, 1, 0, 0, 0, 494, 495, 5, 3, 0, 0, 495, 498, 3, 40, 20, 0, 496, 497, 5, 83, 0, 0, 497, 499, 3, 44, 22, 0, 498, 496, 1, 0, 0, 0, 498, 499, 1, 0, 0, 0, 499, 509, 1, 0, 0, 0, 500, 501, 5, 84, 0, 0, 501, 506, 3, 96, 48, 0, 502, 503, 5, 34, 0, 0, 503, 505, 3, 96, 48, 0, 504, 502, 1, 0, 0, 0, 505, 508, 1, 0, 0, 0, 506, 504, 1, 0, 0, 0, 506, 507, 1, 0, 0, 0, 507, 510, 1, 0, 0, 0, 508, 506, 1, 0, 0, 0, 509, 500, 1, 0, 0, 0, 509, 510, 1, 0, 0, 0, 510, 95, 1, 0, 0, 0, 511, 512, 3, 44, 22, 0, 512, 513, 5, 33, 0, 0, 513, 
515, 1, 0, 0, 0, 514, 511, 1, 0, 0, 0, 514, 515, 1, 0, 0, 0, 515, 516, 1, 0, 0, 0, 516, 517, 3, 44, 22, 0, 517, 97, 1, 0, 0, 0, 52, 109, 116, 131, 143, 152, 160, 164, 172, 174, 179, 186, 191, 198, 204, 212, 214, 224, 234, 237, 249, 257, 265, 269, 278, 288, 292, 298, 305, 315, 323, 345, 356, 367, 372, 383, 388, 392, 400, 409, 412, 420, 429, 440, 454, 465, 468, 473, 492, 498, 506, 509, 514] \ No newline at end of file diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java index d136c346927e6..54ec466de9623 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParser.java @@ -24,12 +24,18 @@ public class EsqlBaseParser extends Parser { PIPE=26, STRING=27, INTEGER_LITERAL=28, DECIMAL_LITERAL=29, BY=30, AND=31, ASC=32, ASSIGN=33, COMMA=34, DESC=35, DOT=36, FALSE=37, FIRST=38, LAST=39, LP=40, IN=41, IS=42, LIKE=43, NOT=44, NULL=45, NULLS=46, OR=47, PARAM=48, - RLIKE=49, RP=50, TRUE=51, INFO=52, FUNCTIONS=53, EQ=54, NEQ=55, LT=56, - LTE=57, GT=58, GTE=59, PLUS=60, MINUS=61, ASTERISK=62, SLASH=63, PERCENT=64, - OPENING_BRACKET=65, CLOSING_BRACKET=66, UNQUOTED_IDENTIFIER=67, QUOTED_IDENTIFIER=68, - EXPR_LINE_COMMENT=69, EXPR_MULTILINE_COMMENT=70, EXPR_WS=71, AS=72, METADATA=73, - ON=74, WITH=75, SRC_UNQUOTED_IDENTIFIER=76, SRC_QUOTED_IDENTIFIER=77, - SRC_LINE_COMMENT=78, SRC_MULTILINE_COMMENT=79, SRC_WS=80, EXPLAIN_PIPE=81; + RLIKE=49, RP=50, TRUE=51, EQ=52, NEQ=53, LT=54, LTE=55, GT=56, GTE=57, + PLUS=58, MINUS=59, ASTERISK=60, SLASH=61, PERCENT=62, OPENING_BRACKET=63, + CLOSING_BRACKET=64, UNQUOTED_IDENTIFIER=65, QUOTED_IDENTIFIER=66, EXPR_LINE_COMMENT=67, + EXPR_MULTILINE_COMMENT=68, EXPR_WS=69, METADATA=70, FROM_UNQUOTED_IDENTIFIER=71, + FROM_LINE_COMMENT=72, FROM_MULTILINE_COMMENT=73, FROM_WS=74, PROJECT_UNQUOTED_IDENTIFIER=75, + PROJECT_LINE_COMMENT=76, PROJECT_MULTILINE_COMMENT=77, PROJECT_WS=78, + AS=79, RENAME_LINE_COMMENT=80, RENAME_MULTILINE_COMMENT=81, RENAME_WS=82, + ON=83, WITH=84, ENRICH_LINE_COMMENT=85, ENRICH_MULTILINE_COMMENT=86, ENRICH_WS=87, + ENRICH_FIELD_LINE_COMMENT=88, ENRICH_FIELD_MULTILINE_COMMENT=89, ENRICH_FIELD_WS=90, + MVEXPAND_LINE_COMMENT=91, MVEXPAND_MULTILINE_COMMENT=92, MVEXPAND_WS=93, + INFO=94, FUNCTIONS=95, SHOW_LINE_COMMENT=96, SHOW_MULTILINE_COMMENT=97, + SHOW_WS=98; public static final int RULE_singleStatement = 0, RULE_query = 1, RULE_sourceCommand = 2, RULE_processingCommand = 3, RULE_whereCommand = 4, RULE_booleanExpression = 5, RULE_regexBooleanExpression = 6, @@ -37,26 +43,28 @@ public class EsqlBaseParser extends Parser { RULE_functionExpression = 10, RULE_rowCommand = 11, RULE_fields = 12, RULE_field = 13, RULE_fromCommand = 14, RULE_metadata = 15, RULE_evalCommand = 16, RULE_statsCommand = 17, RULE_inlinestatsCommand = 18, RULE_grouping = 19, - RULE_sourceIdentifier = 20, RULE_qualifiedName = 21, RULE_identifier = 22, - RULE_constant = 23, RULE_limitCommand = 24, RULE_sortCommand = 25, RULE_orderExpression = 26, - RULE_keepCommand = 27, RULE_dropCommand = 28, RULE_renameCommand = 29, - RULE_renameClause = 30, RULE_dissectCommand = 31, RULE_grokCommand = 32, - RULE_mvExpandCommand = 33, RULE_commandOptions = 34, RULE_commandOption = 35, - RULE_booleanValue = 36, RULE_numericValue = 37, RULE_decimalValue = 38, - RULE_integerValue = 39, RULE_string = 40, RULE_comparisonOperator = 41, - 
RULE_explainCommand = 42, RULE_subqueryExpression = 43, RULE_showCommand = 44, - RULE_enrichCommand = 45, RULE_enrichWithClause = 46; + RULE_fromIdentifier = 20, RULE_qualifiedName = 21, RULE_qualifiedNamePattern = 22, + RULE_identifier = 23, RULE_identifierPattern = 24, RULE_constant = 25, + RULE_limitCommand = 26, RULE_sortCommand = 27, RULE_orderExpression = 28, + RULE_keepCommand = 29, RULE_dropCommand = 30, RULE_renameCommand = 31, + RULE_renameClause = 32, RULE_dissectCommand = 33, RULE_grokCommand = 34, + RULE_mvExpandCommand = 35, RULE_commandOptions = 36, RULE_commandOption = 37, + RULE_booleanValue = 38, RULE_numericValue = 39, RULE_decimalValue = 40, + RULE_integerValue = 41, RULE_string = 42, RULE_comparisonOperator = 43, + RULE_explainCommand = 44, RULE_subqueryExpression = 45, RULE_showCommand = 46, + RULE_enrichCommand = 47, RULE_enrichWithClause = 48; private static String[] makeRuleNames() { return new String[] { "singleStatement", "query", "sourceCommand", "processingCommand", "whereCommand", "booleanExpression", "regexBooleanExpression", "valueExpression", "operatorExpression", "primaryExpression", "functionExpression", "rowCommand", "fields", "field", "fromCommand", "metadata", "evalCommand", "statsCommand", "inlinestatsCommand", - "grouping", "sourceIdentifier", "qualifiedName", "identifier", "constant", - "limitCommand", "sortCommand", "orderExpression", "keepCommand", "dropCommand", - "renameCommand", "renameClause", "dissectCommand", "grokCommand", "mvExpandCommand", - "commandOptions", "commandOption", "booleanValue", "numericValue", "decimalValue", - "integerValue", "string", "comparisonOperator", "explainCommand", "subqueryExpression", + "grouping", "fromIdentifier", "qualifiedName", "qualifiedNamePattern", + "identifier", "identifierPattern", "constant", "limitCommand", "sortCommand", + "orderExpression", "keepCommand", "dropCommand", "renameCommand", "renameClause", + "dissectCommand", "grokCommand", "mvExpandCommand", "commandOptions", + "commandOption", "booleanValue", "numericValue", "decimalValue", "integerValue", + "string", "comparisonOperator", "explainCommand", "subqueryExpression", "showCommand", "enrichCommand", "enrichWithClause" }; } @@ -67,12 +75,14 @@ private static String[] makeLiteralNames() { null, "'dissect'", "'drop'", "'enrich'", "'eval'", "'explain'", "'from'", "'grok'", "'inlinestats'", "'keep'", "'limit'", "'mv_expand'", "'project'", "'rename'", "'row'", "'show'", "'sort'", "'stats'", "'where'", null, - null, null, null, null, null, null, null, null, null, null, "'by'", "'and'", - "'asc'", null, null, "'desc'", "'.'", "'false'", "'first'", "'last'", - "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", "'or'", - "'?'", "'rlike'", "')'", "'true'", "'info'", "'functions'", "'=='", "'!='", - "'<'", "'<='", "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, - "']'", null, null, null, null, null, "'as'", "'metadata'", "'on'", "'with'" + null, null, null, null, null, null, "'|'", null, null, null, "'by'", + "'and'", "'asc'", "'='", "','", "'desc'", "'.'", "'false'", "'first'", + "'last'", "'('", "'in'", "'is'", "'like'", "'not'", "'null'", "'nulls'", + "'or'", "'?'", "'rlike'", "')'", "'true'", "'=='", "'!='", "'<'", "'<='", + "'>'", "'>='", "'+'", "'-'", "'*'", "'/'", "'%'", null, "']'", null, + null, null, null, null, "'metadata'", null, null, null, null, null, null, + null, null, "'as'", null, null, null, "'on'", "'with'", null, null, null, + null, null, null, null, null, null, "'info'", "'functions'" }; } private static 
final String[] _LITERAL_NAMES = makeLiteralNames(); @@ -85,12 +95,17 @@ private static String[] makeSymbolicNames() { "PIPE", "STRING", "INTEGER_LITERAL", "DECIMAL_LITERAL", "BY", "AND", "ASC", "ASSIGN", "COMMA", "DESC", "DOT", "FALSE", "FIRST", "LAST", "LP", "IN", "IS", "LIKE", "NOT", "NULL", "NULLS", "OR", "PARAM", "RLIKE", "RP", - "TRUE", "INFO", "FUNCTIONS", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", - "MINUS", "ASTERISK", "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", - "UNQUOTED_IDENTIFIER", "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", - "EXPR_WS", "AS", "METADATA", "ON", "WITH", "SRC_UNQUOTED_IDENTIFIER", - "SRC_QUOTED_IDENTIFIER", "SRC_LINE_COMMENT", "SRC_MULTILINE_COMMENT", - "SRC_WS", "EXPLAIN_PIPE" + "TRUE", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", + "SLASH", "PERCENT", "OPENING_BRACKET", "CLOSING_BRACKET", "UNQUOTED_IDENTIFIER", + "QUOTED_IDENTIFIER", "EXPR_LINE_COMMENT", "EXPR_MULTILINE_COMMENT", "EXPR_WS", + "METADATA", "FROM_UNQUOTED_IDENTIFIER", "FROM_LINE_COMMENT", "FROM_MULTILINE_COMMENT", + "FROM_WS", "PROJECT_UNQUOTED_IDENTIFIER", "PROJECT_LINE_COMMENT", "PROJECT_MULTILINE_COMMENT", + "PROJECT_WS", "AS", "RENAME_LINE_COMMENT", "RENAME_MULTILINE_COMMENT", + "RENAME_WS", "ON", "WITH", "ENRICH_LINE_COMMENT", "ENRICH_MULTILINE_COMMENT", + "ENRICH_WS", "ENRICH_FIELD_LINE_COMMENT", "ENRICH_FIELD_MULTILINE_COMMENT", + "ENRICH_FIELD_WS", "MVEXPAND_LINE_COMMENT", "MVEXPAND_MULTILINE_COMMENT", + "MVEXPAND_WS", "INFO", "FUNCTIONS", "SHOW_LINE_COMMENT", "SHOW_MULTILINE_COMMENT", + "SHOW_WS" }; } private static final String[] _SYMBOLIC_NAMES = makeSymbolicNames(); @@ -177,9 +192,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(94); + setState(98); query(0); - setState(95); + setState(99); match(EOF); } } @@ -275,11 +290,11 @@ private QueryContext query(int _p) throws RecognitionException { _ctx = _localctx; _prevctx = _localctx; - setState(98); + setState(102); sourceCommand(); } _ctx.stop = _input.LT(-1); - setState(105); + setState(109); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -290,16 +305,16 @@ private QueryContext query(int _p) throws RecognitionException { { _localctx = new CompositeQueryContext(new QueryContext(_parentctx, _parentState)); pushNewRecursionContext(_localctx, _startState, RULE_query); - setState(100); + setState(104); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(101); + setState(105); match(PIPE); - setState(102); + setState(106); processingCommand(); } } } - setState(107); + setState(111); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,0,_ctx); } @@ -354,34 +369,34 @@ public final SourceCommandContext sourceCommand() throws RecognitionException { SourceCommandContext _localctx = new SourceCommandContext(_ctx, getState()); enterRule(_localctx, 4, RULE_sourceCommand); try { - setState(112); + setState(116); _errHandler.sync(this); switch (_input.LA(1)) { case EXPLAIN: enterOuterAlt(_localctx, 1); { - setState(108); + setState(112); explainCommand(); } break; case FROM: enterOuterAlt(_localctx, 2); { - setState(109); + setState(113); fromCommand(); } break; case ROW: enterOuterAlt(_localctx, 3); { - setState(110); + setState(114); rowCommand(); } break; case SHOW: enterOuterAlt(_localctx, 4); { - setState(111); + 
setState(115); showCommand(); } break; @@ -465,27 +480,27 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce ProcessingCommandContext _localctx = new ProcessingCommandContext(_ctx, getState()); enterRule(_localctx, 6, RULE_processingCommand); try { - setState(127); + setState(131); _errHandler.sync(this); switch (_input.LA(1)) { case EVAL: enterOuterAlt(_localctx, 1); { - setState(114); + setState(118); evalCommand(); } break; case INLINESTATS: enterOuterAlt(_localctx, 2); { - setState(115); + setState(119); inlinestatsCommand(); } break; case LIMIT: enterOuterAlt(_localctx, 3); { - setState(116); + setState(120); limitCommand(); } break; @@ -493,70 +508,70 @@ public final ProcessingCommandContext processingCommand() throws RecognitionExce case PROJECT: enterOuterAlt(_localctx, 4); { - setState(117); + setState(121); keepCommand(); } break; case SORT: enterOuterAlt(_localctx, 5); { - setState(118); + setState(122); sortCommand(); } break; case STATS: enterOuterAlt(_localctx, 6); { - setState(119); + setState(123); statsCommand(); } break; case WHERE: enterOuterAlt(_localctx, 7); { - setState(120); + setState(124); whereCommand(); } break; case DROP: enterOuterAlt(_localctx, 8); { - setState(121); + setState(125); dropCommand(); } break; case RENAME: enterOuterAlt(_localctx, 9); { - setState(122); + setState(126); renameCommand(); } break; case DISSECT: enterOuterAlt(_localctx, 10); { - setState(123); + setState(127); dissectCommand(); } break; case GROK: enterOuterAlt(_localctx, 11); { - setState(124); + setState(128); grokCommand(); } break; case ENRICH: enterOuterAlt(_localctx, 12); { - setState(125); + setState(129); enrichCommand(); } break; case MV_EXPAND: enterOuterAlt(_localctx, 13); { - setState(126); + setState(130); mvExpandCommand(); } break; @@ -607,9 +622,9 @@ public final WhereCommandContext whereCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(129); + setState(133); match(WHERE); - setState(130); + setState(134); booleanExpression(0); } } @@ -804,7 +819,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _alt; enterOuterAlt(_localctx, 1); { - setState(160); + setState(164); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,6,_ctx) ) { case 1: @@ -813,9 +828,9 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _ctx = _localctx; _prevctx = _localctx; - setState(133); + setState(137); match(NOT); - setState(134); + setState(138); booleanExpression(7); } break; @@ -824,7 +839,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(135); + setState(139); valueExpression(); } break; @@ -833,7 +848,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new RegexExpressionContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(136); + setState(140); regexBooleanExpression(); } break; @@ -842,41 +857,41 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalInContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(137); + setState(141); valueExpression(); - setState(139); + setState(143); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(138); + setState(142); match(NOT); } } - setState(141); + setState(145); match(IN); - setState(142); + 
setState(146); match(LP); - setState(143); + setState(147); valueExpression(); - setState(148); + setState(152); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(144); + setState(148); match(COMMA); - setState(145); + setState(149); valueExpression(); } } - setState(150); + setState(154); _errHandler.sync(this); _la = _input.LA(1); } - setState(151); + setState(155); match(RP); } break; @@ -885,27 +900,27 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new IsNullContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(153); + setState(157); valueExpression(); - setState(154); + setState(158); match(IS); - setState(156); + setState(160); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(155); + setState(159); match(NOT); } } - setState(158); + setState(162); match(NULL); } break; } _ctx.stop = _input.LT(-1); - setState(170); + setState(174); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -913,7 +928,7 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(168); + setState(172); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) { case 1: @@ -921,11 +936,11 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(162); + setState(166); if (!(precpred(_ctx, 4))) throw new FailedPredicateException(this, "precpred(_ctx, 4)"); - setState(163); + setState(167); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(164); + setState(168); ((LogicalBinaryContext)_localctx).right = booleanExpression(5); } break; @@ -934,18 +949,18 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(165); + setState(169); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(166); + setState(170); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(167); + setState(171); ((LogicalBinaryContext)_localctx).right = booleanExpression(4); } break; } } } - setState(172); + setState(176); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,8,_ctx); } @@ -1000,48 +1015,48 @@ public final RegexBooleanExpressionContext regexBooleanExpression() throws Recog enterRule(_localctx, 12, RULE_regexBooleanExpression); int _la; try { - setState(187); + setState(191); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,11,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(173); + setState(177); valueExpression(); - setState(175); + setState(179); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(174); + setState(178); match(NOT); } } - setState(177); + setState(181); ((RegexBooleanExpressionContext)_localctx).kind = match(LIKE); - setState(178); + setState(182); ((RegexBooleanExpressionContext)_localctx).pattern = 
string(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(180); + setState(184); valueExpression(); - setState(182); + setState(186); _errHandler.sync(this); _la = _input.LA(1); if (_la==NOT) { { - setState(181); + setState(185); match(NOT); } } - setState(184); + setState(188); ((RegexBooleanExpressionContext)_localctx).kind = match(RLIKE); - setState(185); + setState(189); ((RegexBooleanExpressionContext)_localctx).pattern = string(); } break; @@ -1127,14 +1142,14 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, getState()); enterRule(_localctx, 14, RULE_valueExpression); try { - setState(194); + setState(198); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,12,_ctx) ) { case 1: _localctx = new ValueExpressionDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(189); + setState(193); operatorExpression(0); } break; @@ -1142,11 +1157,11 @@ public final ValueExpressionContext valueExpression() throws RecognitionExceptio _localctx = new ComparisonContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(190); + setState(194); ((ComparisonContext)_localctx).left = operatorExpression(0); - setState(191); + setState(195); comparisonOperator(); - setState(192); + setState(196); ((ComparisonContext)_localctx).right = operatorExpression(0); } break; @@ -1271,7 +1286,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE int _alt; enterOuterAlt(_localctx, 1); { - setState(200); + setState(204); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,13,_ctx) ) { case 1: @@ -1280,7 +1295,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _ctx = _localctx; _prevctx = _localctx; - setState(197); + setState(201); primaryExpression(); } break; @@ -1289,7 +1304,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(198); + setState(202); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1300,13 +1315,13 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(199); + setState(203); operatorExpression(3); } break; } _ctx.stop = _input.LT(-1); - setState(210); + setState(214); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { @@ -1314,7 +1329,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(208); + setState(212); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,14,_ctx) ) { case 1: @@ -1322,12 +1337,12 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(202); + setState(206); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(203); + setState(207); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = 
_input.LA(1); - if ( !((((_la - 62)) & ~0x3f) == 0 && ((1L << (_la - 62)) & 7L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 8070450532247928832L) != 0) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { @@ -1335,7 +1350,7 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(204); + setState(208); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(3); } break; @@ -1344,9 +1359,9 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _localctx = new ArithmeticBinaryContext(new OperatorExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_operatorExpression); - setState(205); + setState(209); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(206); + setState(210); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -1357,14 +1372,14 @@ private OperatorExpressionContext operatorExpression(int _p) throws RecognitionE _errHandler.reportMatch(this); consume(); } - setState(207); + setState(211); ((ArithmeticBinaryContext)_localctx).right = operatorExpression(2); } break; } } } - setState(212); + setState(216); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,15,_ctx); } @@ -1486,14 +1501,14 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); enterRule(_localctx, 18, RULE_primaryExpression); try { - setState(220); + setState(224); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) { case 1: _localctx = new ConstantDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(213); + setState(217); constant(); } break; @@ -1501,7 +1516,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(214); + setState(218); qualifiedName(); } break; @@ -1509,7 +1524,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(215); + setState(219); functionExpression(); } break; @@ -1517,11 +1532,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(216); + setState(220); match(LP); - setState(217); + setState(221); booleanExpression(0); - setState(218); + setState(222); match(RP); } break; @@ -1583,16 +1598,16 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(222); + setState(226); identifier(); - setState(223); + setState(227); match(LP); - setState(233); + setState(237); _errHandler.sync(this); switch (_input.LA(1)) { case ASTERISK: { - setState(224); + setState(228); match(ASTERISK); } break; @@ -1612,21 +1627,21 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case QUOTED_IDENTIFIER: { { - setState(225); + setState(229); booleanExpression(0); - setState(230); + setState(234); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - 
setState(226); + setState(230); match(COMMA); - setState(227); + setState(231); booleanExpression(0); } } - setState(232); + setState(236); _errHandler.sync(this); _la = _input.LA(1); } @@ -1638,7 +1653,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx default: break; } - setState(235); + setState(239); match(RP); } } @@ -1685,9 +1700,9 @@ public final RowCommandContext rowCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(237); + setState(241); match(ROW); - setState(238); + setState(242); fields(); } } @@ -1741,23 +1756,23 @@ public final FieldsContext fields() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(240); + setState(244); field(); - setState(245); + setState(249); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(241); + setState(245); match(COMMA); - setState(242); + setState(246); field(); } } } - setState(247); + setState(251); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,19,_ctx); } @@ -1807,24 +1822,24 @@ public final FieldContext field() throws RecognitionException { FieldContext _localctx = new FieldContext(_ctx, getState()); enterRule(_localctx, 26, RULE_field); try { - setState(253); + setState(257); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,20,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(248); + setState(252); booleanExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(249); + setState(253); qualifiedName(); - setState(250); + setState(254); match(ASSIGN); - setState(251); + setState(255); booleanExpression(0); } break; @@ -1844,11 +1859,11 @@ public final FieldContext field() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class FromCommandContext extends ParserRuleContext { public TerminalNode FROM() { return getToken(EsqlBaseParser.FROM, 0); } - public List sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public List fromIdentifier() { + return getRuleContexts(FromIdentifierContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public FromIdentifierContext fromIdentifier(int i) { + return getRuleContext(FromIdentifierContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -1884,34 +1899,34 @@ public final FromCommandContext fromCommand() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(255); + setState(259); match(FROM); - setState(256); - sourceIdentifier(); - setState(261); + setState(260); + fromIdentifier(); + setState(265); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(257); + setState(261); match(COMMA); - setState(258); - sourceIdentifier(); + setState(262); + fromIdentifier(); } } } - setState(263); + setState(267); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,21,_ctx); } - setState(265); + setState(269); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) { case 1: { - setState(264); + setState(268); metadata(); } break; @@ -1933,11 +1948,11 @@ public final FromCommandContext 
fromCommand() throws RecognitionException { public static class MetadataContext extends ParserRuleContext { public TerminalNode OPENING_BRACKET() { return getToken(EsqlBaseParser.OPENING_BRACKET, 0); } public TerminalNode METADATA() { return getToken(EsqlBaseParser.METADATA, 0); } - public List sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public List fromIdentifier() { + return getRuleContexts(FromIdentifierContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public FromIdentifierContext fromIdentifier(int i) { + return getRuleContext(FromIdentifierContext.class,i); } public TerminalNode CLOSING_BRACKET() { return getToken(EsqlBaseParser.CLOSING_BRACKET, 0); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } @@ -1971,29 +1986,29 @@ public final MetadataContext metadata() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(267); + setState(271); match(OPENING_BRACKET); - setState(268); + setState(272); match(METADATA); - setState(269); - sourceIdentifier(); - setState(274); + setState(273); + fromIdentifier(); + setState(278); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(270); + setState(274); match(COMMA); - setState(271); - sourceIdentifier(); + setState(275); + fromIdentifier(); } } - setState(276); + setState(280); _errHandler.sync(this); _la = _input.LA(1); } - setState(277); + setState(281); match(CLOSING_BRACKET); } } @@ -2040,9 +2055,9 @@ public final EvalCommandContext evalCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(279); + setState(283); match(EVAL); - setState(280); + setState(284); fields(); } } @@ -2093,26 +2108,26 @@ public final StatsCommandContext statsCommand() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(282); + setState(286); match(STATS); - setState(284); + setState(288); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,24,_ctx) ) { case 1: { - setState(283); + setState(287); fields(); } break; } - setState(288); + setState(292); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,25,_ctx) ) { case 1: { - setState(286); + setState(290); match(BY); - setState(287); + setState(291); grouping(); } break; @@ -2166,18 +2181,18 @@ public final InlinestatsCommandContext inlinestatsCommand() throws RecognitionEx try { enterOuterAlt(_localctx, 1); { - setState(290); + setState(294); match(INLINESTATS); - setState(291); + setState(295); fields(); - setState(294); + setState(298); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,26,_ctx) ) { case 1: { - setState(292); + setState(296); match(BY); - setState(293); + setState(297); grouping(); } break; @@ -2234,23 +2249,23 @@ public final GroupingContext grouping() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(296); + setState(300); qualifiedName(); - setState(301); + setState(305); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(297); + setState(301); match(COMMA); - setState(298); + setState(302); qualifiedName(); } } } - setState(303); + setState(307); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,27,_ctx); } @@ -2268,39 +2283,39 @@ public final GroupingContext grouping() throws 
RecognitionException { } @SuppressWarnings("CheckReturnValue") - public static class SourceIdentifierContext extends ParserRuleContext { - public TerminalNode SRC_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.SRC_UNQUOTED_IDENTIFIER, 0); } - public TerminalNode SRC_QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.SRC_QUOTED_IDENTIFIER, 0); } + public static class FromIdentifierContext extends ParserRuleContext { + public TerminalNode FROM_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.FROM_UNQUOTED_IDENTIFIER, 0); } + public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } @SuppressWarnings("this-escape") - public SourceIdentifierContext(ParserRuleContext parent, int invokingState) { + public FromIdentifierContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_sourceIdentifier; } + @Override public int getRuleIndex() { return RULE_fromIdentifier; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterSourceIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterFromIdentifier(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitSourceIdentifier(this); + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitFromIdentifier(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitSourceIdentifier(this); + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitFromIdentifier(this); else return visitor.visitChildren(this); } } - public final SourceIdentifierContext sourceIdentifier() throws RecognitionException { - SourceIdentifierContext _localctx = new SourceIdentifierContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_sourceIdentifier); + public final FromIdentifierContext fromIdentifier() throws RecognitionException { + FromIdentifierContext _localctx = new FromIdentifierContext(_ctx, getState()); + enterRule(_localctx, 40, RULE_fromIdentifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(304); + setState(308); _la = _input.LA(1); - if ( !(_la==SRC_UNQUOTED_IDENTIFIER || _la==SRC_QUOTED_IDENTIFIER) ) { + if ( !(_la==QUOTED_IDENTIFIER || _la==FROM_UNQUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); } else { @@ -2360,23 +2375,23 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { int _alt; enterOuterAlt(_localctx, 1); { - setState(306); + setState(310); identifier(); - setState(311); + setState(315); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(307); + setState(311); match(DOT); - setState(308); + setState(312); identifier(); } } } - setState(313); + setState(317); _errHandler.sync(this); _alt = getInterpreter().adaptivePredict(_input,28,_ctx); } @@ -2393,6 +2408,78 @@ public final QualifiedNameContext qualifiedName() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class QualifiedNamePatternContext extends ParserRuleContext { + public List identifierPattern() { + 
return getRuleContexts(IdentifierPatternContext.class); + } + public IdentifierPatternContext identifierPattern(int i) { + return getRuleContext(IdentifierPatternContext.class,i); + } + public List DOT() { return getTokens(EsqlBaseParser.DOT); } + public TerminalNode DOT(int i) { + return getToken(EsqlBaseParser.DOT, i); + } + @SuppressWarnings("this-escape") + public QualifiedNamePatternContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_qualifiedNamePattern; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterQualifiedNamePattern(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitQualifiedNamePattern(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitQualifiedNamePattern(this); + else return visitor.visitChildren(this); + } + } + + public final QualifiedNamePatternContext qualifiedNamePattern() throws RecognitionException { + QualifiedNamePatternContext _localctx = new QualifiedNamePatternContext(_ctx, getState()); + enterRule(_localctx, 44, RULE_qualifiedNamePattern); + try { + int _alt; + enterOuterAlt(_localctx, 1); + { + setState(318); + identifierPattern(); + setState(323); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { + if ( _alt==1 ) { + { + { + setState(319); + match(DOT); + setState(320); + identifierPattern(); + } + } + } + setState(325); + _errHandler.sync(this); + _alt = getInterpreter().adaptivePredict(_input,29,_ctx); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class IdentifierContext extends ParserRuleContext { public TerminalNode UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.UNQUOTED_IDENTIFIER, 0); } @@ -2419,12 +2506,12 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_identifier); + enterRule(_localctx, 46, RULE_identifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(314); + setState(326); _la = _input.LA(1); if ( !(_la==UNQUOTED_IDENTIFIER || _la==QUOTED_IDENTIFIER) ) { _errHandler.recoverInline(this); @@ -2447,6 +2534,60 @@ public final IdentifierContext identifier() throws RecognitionException { return _localctx; } + @SuppressWarnings("CheckReturnValue") + public static class IdentifierPatternContext extends ParserRuleContext { + public TerminalNode PROJECT_UNQUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.PROJECT_UNQUOTED_IDENTIFIER, 0); } + public TerminalNode QUOTED_IDENTIFIER() { return getToken(EsqlBaseParser.QUOTED_IDENTIFIER, 0); } + @SuppressWarnings("this-escape") + public IdentifierPatternContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_identifierPattern; } + @Override + public void enterRule(ParseTreeListener listener) 
{ + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).enterIdentifierPattern(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof EsqlBaseParserListener ) ((EsqlBaseParserListener)listener).exitIdentifierPattern(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof EsqlBaseParserVisitor ) return ((EsqlBaseParserVisitor)visitor).visitIdentifierPattern(this); + else return visitor.visitChildren(this); + } + } + + public final IdentifierPatternContext identifierPattern() throws RecognitionException { + IdentifierPatternContext _localctx = new IdentifierPatternContext(_ctx, getState()); + enterRule(_localctx, 48, RULE_identifierPattern); + int _la; + try { + enterOuterAlt(_localctx, 1); + { + setState(328); + _la = _input.LA(1); + if ( !(_la==QUOTED_IDENTIFIER || _la==PROJECT_UNQUOTED_IDENTIFIER) ) { + _errHandler.recoverInline(this); + } + else { + if ( _input.LA(1)==Token.EOF ) matchedEOF = true; + _errHandler.reportMatch(this); + consume(); + } + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + @SuppressWarnings("CheckReturnValue") public static class ConstantContext extends ParserRuleContext { @SuppressWarnings("this-escape") @@ -2698,17 +2839,17 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_constant); + enterRule(_localctx, 50, RULE_constant); int _la; try { - setState(358); + setState(372); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,32,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,33,_ctx) ) { case 1: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(316); + setState(330); match(NULL); } break; @@ -2716,9 +2857,9 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new QualifiedIntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(317); + setState(331); integerValue(); - setState(318); + setState(332); match(UNQUOTED_IDENTIFIER); } break; @@ -2726,7 +2867,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(320); + setState(334); decimalValue(); } break; @@ -2734,7 +2875,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(321); + setState(335); integerValue(); } break; @@ -2742,7 +2883,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(322); + setState(336); booleanValue(); } break; @@ -2750,7 +2891,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new InputParamContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(323); + setState(337); match(PARAM); } break; @@ -2758,7 +2899,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(324); + setState(338); string(); } break; @@ -2766,27 +2907,27 @@ public final 
ConstantContext constant() throws RecognitionException { _localctx = new NumericArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(325); + setState(339); match(OPENING_BRACKET); - setState(326); + setState(340); numericValue(); - setState(331); + setState(345); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(327); + setState(341); match(COMMA); - setState(328); + setState(342); numericValue(); } } - setState(333); + setState(347); _errHandler.sync(this); _la = _input.LA(1); } - setState(334); + setState(348); match(CLOSING_BRACKET); } break; @@ -2794,27 +2935,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(336); + setState(350); match(OPENING_BRACKET); - setState(337); + setState(351); booleanValue(); - setState(342); + setState(356); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(338); + setState(352); match(COMMA); - setState(339); + setState(353); booleanValue(); } } - setState(344); + setState(358); _errHandler.sync(this); _la = _input.LA(1); } - setState(345); + setState(359); match(CLOSING_BRACKET); } break; @@ -2822,27 +2963,27 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringArrayLiteralContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(347); + setState(361); match(OPENING_BRACKET); - setState(348); + setState(362); string(); - setState(353); + setState(367); _errHandler.sync(this); _la = _input.LA(1); while (_la==COMMA) { { { - setState(349); + setState(363); match(COMMA); - setState(350); + setState(364); string(); } } - setState(355); + setState(369); _errHandler.sync(this); _la = _input.LA(1); } - setState(356); + setState(370); match(CLOSING_BRACKET); } break; @@ -2885,13 +3026,13 @@ public T accept(ParseTreeVisitor visitor) { public final LimitCommandContext limitCommand() throws RecognitionException { LimitCommandContext _localctx = new LimitCommandContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_limitCommand); + enterRule(_localctx, 52, RULE_limitCommand); try { enterOuterAlt(_localctx, 1); { - setState(360); + setState(374); match(LIMIT); - setState(361); + setState(375); match(INTEGER_LITERAL); } } @@ -2941,32 +3082,32 @@ public T accept(ParseTreeVisitor visitor) { public final SortCommandContext sortCommand() throws RecognitionException { SortCommandContext _localctx = new SortCommandContext(_ctx, getState()); - enterRule(_localctx, 50, RULE_sortCommand); + enterRule(_localctx, 54, RULE_sortCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(363); + setState(377); match(SORT); - setState(364); + setState(378); orderExpression(); - setState(369); + setState(383); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(365); + setState(379); match(COMMA); - setState(366); + setState(380); orderExpression(); } } } - setState(371); + setState(385); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,33,_ctx); + _alt = getInterpreter().adaptivePredict(_input,34,_ctx); } } } @@ -3015,19 +3156,19 @@ public T accept(ParseTreeVisitor visitor) { public final OrderExpressionContext orderExpression() throws RecognitionException { OrderExpressionContext _localctx = new 
OrderExpressionContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_orderExpression); + enterRule(_localctx, 56, RULE_orderExpression); int _la; try { enterOuterAlt(_localctx, 1); { - setState(372); + setState(386); booleanExpression(0); - setState(374); + setState(388); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,34,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { case 1: { - setState(373); + setState(387); ((OrderExpressionContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -3041,14 +3182,14 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio } break; } - setState(378); + setState(392); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,35,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,36,_ctx) ) { case 1: { - setState(376); + setState(390); match(NULLS); - setState(377); + setState(391); ((OrderExpressionContext)_localctx).nullOrdering = _input.LT(1); _la = _input.LA(1); if ( !(_la==FIRST || _la==LAST) ) { @@ -3078,11 +3219,11 @@ public final OrderExpressionContext orderExpression() throws RecognitionExceptio @SuppressWarnings("CheckReturnValue") public static class KeepCommandContext extends ParserRuleContext { public TerminalNode KEEP() { return getToken(EsqlBaseParser.KEEP, 0); } - public List sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public List qualifiedNamePattern() { + return getRuleContexts(QualifiedNamePatternContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public QualifiedNamePatternContext qualifiedNamePattern(int i) { + return getRuleContext(QualifiedNamePatternContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -3111,63 +3252,63 @@ public T accept(ParseTreeVisitor visitor) { public final KeepCommandContext keepCommand() throws RecognitionException { KeepCommandContext _localctx = new KeepCommandContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_keepCommand); + enterRule(_localctx, 58, RULE_keepCommand); try { int _alt; - setState(398); + setState(412); _errHandler.sync(this); switch (_input.LA(1)) { case KEEP: enterOuterAlt(_localctx, 1); { - setState(380); + setState(394); match(KEEP); - setState(381); - sourceIdentifier(); - setState(386); + setState(395); + qualifiedNamePattern(); + setState(400); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(382); + setState(396); match(COMMA); - setState(383); - sourceIdentifier(); + setState(397); + qualifiedNamePattern(); } } } - setState(388); + setState(402); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,36,_ctx); + _alt = getInterpreter().adaptivePredict(_input,37,_ctx); } } break; case PROJECT: enterOuterAlt(_localctx, 2); { - setState(389); + setState(403); match(PROJECT); - setState(390); - sourceIdentifier(); - setState(395); + setState(404); + qualifiedNamePattern(); + setState(409); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - 
setState(391); + setState(405); match(COMMA); - setState(392); - sourceIdentifier(); + setState(406); + qualifiedNamePattern(); } } } - setState(397); + setState(411); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,37,_ctx); + _alt = getInterpreter().adaptivePredict(_input,38,_ctx); } } break; @@ -3189,11 +3330,11 @@ public final KeepCommandContext keepCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class DropCommandContext extends ParserRuleContext { public TerminalNode DROP() { return getToken(EsqlBaseParser.DROP, 0); } - public List sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public List qualifiedNamePattern() { + return getRuleContexts(QualifiedNamePatternContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public QualifiedNamePatternContext qualifiedNamePattern(int i) { + return getRuleContext(QualifiedNamePatternContext.class,i); } public List COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { @@ -3221,32 +3362,32 @@ public T accept(ParseTreeVisitor visitor) { public final DropCommandContext dropCommand() throws RecognitionException { DropCommandContext _localctx = new DropCommandContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_dropCommand); + enterRule(_localctx, 60, RULE_dropCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(400); + setState(414); match(DROP); - setState(401); - sourceIdentifier(); - setState(406); + setState(415); + qualifiedNamePattern(); + setState(420); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(402); + setState(416); match(COMMA); - setState(403); - sourceIdentifier(); + setState(417); + qualifiedNamePattern(); } } } - setState(408); + setState(422); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,39,_ctx); + _alt = getInterpreter().adaptivePredict(_input,40,_ctx); } } } @@ -3296,32 +3437,32 @@ public T accept(ParseTreeVisitor visitor) { public final RenameCommandContext renameCommand() throws RecognitionException { RenameCommandContext _localctx = new RenameCommandContext(_ctx, getState()); - enterRule(_localctx, 58, RULE_renameCommand); + enterRule(_localctx, 62, RULE_renameCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(409); + setState(423); match(RENAME); - setState(410); + setState(424); renameClause(); - setState(415); + setState(429); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(411); + setState(425); match(COMMA); - setState(412); + setState(426); renameClause(); } } } - setState(417); + setState(431); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,40,_ctx); + _alt = getInterpreter().adaptivePredict(_input,41,_ctx); } } } @@ -3338,14 +3479,14 @@ public final RenameCommandContext renameCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class RenameClauseContext extends ParserRuleContext { - public SourceIdentifierContext oldName; - public SourceIdentifierContext newName; + public 
QualifiedNamePatternContext oldName; + public QualifiedNamePatternContext newName; public TerminalNode AS() { return getToken(EsqlBaseParser.AS, 0); } - public List<SourceIdentifierContext> sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public List<QualifiedNamePatternContext> qualifiedNamePattern() { + return getRuleContexts(QualifiedNamePatternContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public QualifiedNamePatternContext qualifiedNamePattern(int i) { + return getRuleContext(QualifiedNamePatternContext.class,i); } @SuppressWarnings("this-escape") public RenameClauseContext(ParserRuleContext parent, int invokingState) { @@ -3369,16 +3510,16 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final RenameClauseContext renameClause() throws RecognitionException { RenameClauseContext _localctx = new RenameClauseContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_renameClause); + enterRule(_localctx, 64, RULE_renameClause); try { enterOuterAlt(_localctx, 1); { - setState(418); - ((RenameClauseContext)_localctx).oldName = sourceIdentifier(); - setState(419); + setState(432); + ((RenameClauseContext)_localctx).oldName = qualifiedNamePattern(); + setState(433); match(AS); - setState(420); - ((RenameClauseContext)_localctx).newName = sourceIdentifier(); + setState(434); + ((RenameClauseContext)_localctx).newName = qualifiedNamePattern(); } } catch (RecognitionException re) { @@ -3426,22 +3567,22 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final DissectCommandContext dissectCommand() throws RecognitionException { DissectCommandContext _localctx = new DissectCommandContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_dissectCommand); + enterRule(_localctx, 66, RULE_dissectCommand); try { enterOuterAlt(_localctx, 1); { - setState(422); + setState(436); match(DISSECT); - setState(423); + setState(437); primaryExpression(); - setState(424); + setState(438); string(); - setState(426); + setState(440); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,42,_ctx) ) { case 1: { - setState(425); + setState(439); commandOptions(); } break; @@ -3490,15 +3631,15 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final GrokCommandContext grokCommand() throws RecognitionException { GrokCommandContext _localctx = new GrokCommandContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_grokCommand); + enterRule(_localctx, 68, RULE_grokCommand); try { enterOuterAlt(_localctx, 1); { - setState(428); + setState(442); match(GROK); - setState(429); + setState(443); primaryExpression(); - setState(430); + setState(444); string(); } } @@ -3516,8 +3657,8 @@ public final GrokCommandContext grokCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class MvExpandCommandContext extends ParserRuleContext { public TerminalNode MV_EXPAND() { return getToken(EsqlBaseParser.MV_EXPAND, 0); } - public SourceIdentifierContext sourceIdentifier() { - return getRuleContext(SourceIdentifierContext.class,0); + public QualifiedNameContext qualifiedName() { + return getRuleContext(QualifiedNameContext.class,0); } @SuppressWarnings("this-escape") public MvExpandCommandContext(ParserRuleContext parent, int invokingState) { @@ -3541,14 +3682,14 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final MvExpandCommandContext mvExpandCommand() throws RecognitionException { MvExpandCommandContext _localctx = new
MvExpandCommandContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_mvExpandCommand); + enterRule(_localctx, 70, RULE_mvExpandCommand); try { enterOuterAlt(_localctx, 1); { - setState(432); + setState(446); match(MV_EXPAND); - setState(433); - sourceIdentifier(); + setState(447); + qualifiedName(); } } catch (RecognitionException re) { @@ -3596,30 +3737,30 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final CommandOptionsContext commandOptions() throws RecognitionException { CommandOptionsContext _localctx = new CommandOptionsContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_commandOptions); + enterRule(_localctx, 72, RULE_commandOptions); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(435); + setState(449); commandOption(); - setState(440); + setState(454); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(436); + setState(450); match(COMMA); - setState(437); + setState(451); commandOption(); } } } - setState(442); + setState(456); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,42,_ctx); + _alt = getInterpreter().adaptivePredict(_input,43,_ctx); } } } @@ -3665,15 +3806,15 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final CommandOptionContext commandOption() throws RecognitionException { CommandOptionContext _localctx = new CommandOptionContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_commandOption); + enterRule(_localctx, 74, RULE_commandOption); try { enterOuterAlt(_localctx, 1); { - setState(443); + setState(457); identifier(); - setState(444); + setState(458); match(ASSIGN); - setState(445); + setState(459); constant(); } } @@ -3714,12 +3855,12 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_booleanValue); + enterRule(_localctx, 76, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(447); + setState(461); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -3772,22 +3913,22 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final NumericValueContext numericValue() throws RecognitionException { NumericValueContext _localctx = new NumericValueContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_numericValue); + enterRule(_localctx, 78, RULE_numericValue); try { - setState(451); + setState(465); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,43,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,44,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(449); + setState(463); decimalValue(); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(450); + setState(464); integerValue(); } break; @@ -3831,17 +3972,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final DecimalValueContext decimalValue() throws RecognitionException { DecimalValueContext _localctx = new DecimalValueContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_decimalValue); + enterRule(_localctx, 80, RULE_decimalValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(454); + setState(468); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(453); + setState(467); _la = _input.LA(1); if (
!(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3854,7 +3995,7 @@ public final DecimalValueContext decimalValue() throws RecognitionException { } } - setState(456); + setState(470); match(DECIMAL_LITERAL); } } @@ -3896,17 +4037,17 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final IntegerValueContext integerValue() throws RecognitionException { IntegerValueContext _localctx = new IntegerValueContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_integerValue); + enterRule(_localctx, 82, RULE_integerValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(459); + setState(473); _errHandler.sync(this); _la = _input.LA(1); if (_la==PLUS || _la==MINUS) { { - setState(458); + setState(472); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { _errHandler.recoverInline(this); @@ -3919,7 +4060,7 @@ public final IntegerValueContext integerValue() throws RecognitionException { } } - setState(461); + setState(475); match(INTEGER_LITERAL); } } @@ -3959,11 +4100,11 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_string); + enterRule(_localctx, 84, RULE_string); try { enterOuterAlt(_localctx, 1); { - setState(463); + setState(477); match(STRING); } } @@ -4008,14 +4149,14 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_comparisonOperator); + enterRule(_localctx, 86, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(465); + setState(479); _la = _input.LA(1); - if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 1134907106097364992L) != 0) ) { + if ( !(((_la) & ~0x3f) == 0 && ((1L << _la) & 283726776524341248L) != 0) ) { _errHandler.recoverInline(this); } else { @@ -4064,13 +4205,13 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final ExplainCommandContext explainCommand() throws RecognitionException { ExplainCommandContext _localctx = new ExplainCommandContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_explainCommand); + enterRule(_localctx, 88, RULE_explainCommand); try { enterOuterAlt(_localctx, 1); { - setState(467); + setState(481); match(EXPLAIN); - setState(468); + setState(482); subqueryExpression(); } } @@ -4114,15 +4255,15 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final SubqueryExpressionContext subqueryExpression() throws RecognitionException { SubqueryExpressionContext _localctx = new SubqueryExpressionContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_subqueryExpression); + enterRule(_localctx, 90, RULE_subqueryExpression); try { enterOuterAlt(_localctx, 1); { - setState(470); + setState(484); match(OPENING_BRACKET); - setState(471); + setState(485); query(0); - setState(472); + setState(486); match(CLOSING_BRACKET); } } @@ -4194,18 +4335,18 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final ShowCommandContext showCommand() throws RecognitionException { ShowCommandContext _localctx = new ShowCommandContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_showCommand); + enterRule(_localctx, 92, RULE_showCommand); try { - setState(478); + setState(492); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,46,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { case 1:
_localctx = new ShowInfoContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(474); + setState(488); match(SHOW); - setState(475); + setState(489); match(INFO); } break; @@ -4213,9 +4354,9 @@ public final ShowCommandContext showCommand() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(476); + setState(490); match(SHOW); - setState(477); + setState(491); match(FUNCTIONS); } break; @@ -4234,14 +4375,11 @@ public final ShowCommandContext showCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class EnrichCommandContext extends ParserRuleContext { - public SourceIdentifierContext policyName; - public SourceIdentifierContext matchField; + public FromIdentifierContext policyName; + public QualifiedNamePatternContext matchField; public TerminalNode ENRICH() { return getToken(EsqlBaseParser.ENRICH, 0); } - public List<SourceIdentifierContext> sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); - } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public FromIdentifierContext fromIdentifier() { + return getRuleContext(FromIdentifierContext.class,0); } public TerminalNode ON() { return getToken(EsqlBaseParser.ON, 0); } public TerminalNode WITH() { return getToken(EsqlBaseParser.WITH, 0); } @@ -4251,6 +4389,9 @@ public List<EnrichWithClauseContext> enrichWithClause() { public EnrichWithClauseContext enrichWithClause(int i) { return getRuleContext(EnrichWithClauseContext.class,i); } + public QualifiedNamePatternContext qualifiedNamePattern() { + return getRuleContext(QualifiedNamePatternContext.class,0); + } public List<TerminalNode> COMMA() { return getTokens(EsqlBaseParser.COMMA); } public TerminalNode COMMA(int i) { return getToken(EsqlBaseParser.COMMA, i); @@ -4277,53 +4418,53 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final EnrichCommandContext enrichCommand() throws RecognitionException { EnrichCommandContext _localctx = new EnrichCommandContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_enrichCommand); + enterRule(_localctx, 94, RULE_enrichCommand); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(480); + setState(494); match(ENRICH); - setState(481); - ((EnrichCommandContext)_localctx).policyName = sourceIdentifier(); - setState(484); + setState(495); + ((EnrichCommandContext)_localctx).policyName = fromIdentifier(); + setState(498); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,47,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,48,_ctx) ) { case 1: { - setState(482); + setState(496); match(ON); - setState(483); - ((EnrichCommandContext)_localctx).matchField = sourceIdentifier(); + setState(497); + ((EnrichCommandContext)_localctx).matchField = qualifiedNamePattern(); } break; } - setState(495); + setState(509); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,49,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { case 1: { - setState(486); + setState(500); match(WITH); - setState(487); + setState(501); enrichWithClause(); - setState(492); + setState(506); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(488); + setState(502); match(COMMA); - setState(489); + setState(503); enrichWithClause(); } } } - setState(494); +
setState(508); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,48,_ctx); + _alt = getInterpreter().adaptivePredict(_input,49,_ctx); } } break; @@ -4343,13 +4484,13 @@ public final EnrichCommandContext enrichCommand() throws RecognitionException { @SuppressWarnings("CheckReturnValue") public static class EnrichWithClauseContext extends ParserRuleContext { - public SourceIdentifierContext newName; - public SourceIdentifierContext enrichField; - public List<SourceIdentifierContext> sourceIdentifier() { - return getRuleContexts(SourceIdentifierContext.class); + public QualifiedNamePatternContext newName; + public QualifiedNamePatternContext enrichField; + public List<QualifiedNamePatternContext> qualifiedNamePattern() { + return getRuleContexts(QualifiedNamePatternContext.class); } - public SourceIdentifierContext sourceIdentifier(int i) { - return getRuleContext(SourceIdentifierContext.class,i); + public QualifiedNamePatternContext qualifiedNamePattern(int i) { + return getRuleContext(QualifiedNamePatternContext.class,i); } public TerminalNode ASSIGN() { return getToken(EsqlBaseParser.ASSIGN, 0); } @SuppressWarnings("this-escape") @@ -4374,24 +4515,24 @@ public <T> T accept(ParseTreeVisitor<? extends T> visitor) { public final EnrichWithClauseContext enrichWithClause() throws RecognitionException { EnrichWithClauseContext _localctx = new EnrichWithClauseContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_enrichWithClause); + enterRule(_localctx, 96, RULE_enrichWithClause); try { enterOuterAlt(_localctx, 1); { - setState(500); + setState(514); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,50,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,51,_ctx) ) { case 1: { - setState(497); - ((EnrichWithClauseContext)_localctx).newName = sourceIdentifier(); - setState(498); + setState(511); + ((EnrichWithClauseContext)_localctx).newName = qualifiedNamePattern(); + setState(512); match(ASSIGN); } break; } - setState(502); - ((EnrichWithClauseContext)_localctx).enrichField = sourceIdentifier(); + setState(516); + ((EnrichWithClauseContext)_localctx).enrichField = qualifiedNamePattern(); } } catch (RecognitionException re) { @@ -4443,7 +4584,7 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, } public static final String _serializedATN = - "\u0004\u0001Q\u01f9\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ + "\u0004\u0001b\u0207\u0002\u0000\u0007\u0000\u0002\u0001\u0007\u0001\u0002"+ "\u0002\u0007\u0002\u0002\u0003\u0007\u0003\u0002\u0004\u0007\u0004\u0002"+ "\u0005\u0007\u0005\u0002\u0006\u0007\u0006\u0002\u0007\u0007\u0007\u0002"+ "\b\u0007\b\u0002\t\u0007\t\u0002\n\u0007\n\u0002\u000b\u0007\u000b\u0002"+ @@ -4456,319 +4597,327 @@ private boolean operatorExpression_sempred(OperatorExpressionContext _localctx, "\u0002\u001f\u0007\u001f\u0002 \u0007 \u0002!\u0007!\u0002\"\u0007\"\u0002"+ "#\u0007#\u0002$\u0007$\u0002%\u0007%\u0002&\u0007&\u0002\'\u0007\'\u0002"+ "(\u0007(\u0002)\u0007)\u0002*\u0007*\u0002+\u0007+\u0002,\u0007,\u0002"+ - "-\u0007-\u0002.\u0007.\u0001\u0000\u0001\u0000\u0001\u0000\u0001\u0001"+ - "\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0005\u0001"+ - "h\b\u0001\n\u0001\f\u0001k\t\u0001\u0001\u0002\u0001\u0002\u0001\u0002"+ - "\u0001\u0002\u0003\u0002q\b\u0002\u0001\u0003\u0001\u0003\u0001\u0003"+ + "-\u0007-\u0002.\u0007.\u0002/\u0007/\u00020\u00070\u0001\u0000\u0001\u0000"+ + "\u0001\u0000\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001\u0001"+ + "\u0001\u0001\u0005\u0001l\b\u0001\n\u0001\f\u0001o\t\u0001\u0001\u0002"+
+ "\u0001\u0002\u0001\u0002\u0001\u0002\u0003\u0002u\b\u0002\u0001\u0003"+ + "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003"+ - "\u0001\u0003\u0001\u0003\u0001\u0003\u0001\u0003\u0003\u0003\u0080\b\u0003"+ - "\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005\u0001\u0005\u0001\u0005"+ - "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u008c\b\u0005"+ + "\u0003\u0003\u0084\b\u0003\u0001\u0004\u0001\u0004\u0001\u0004\u0001\u0005"+ + "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0003\u0005\u0090\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005"+ + "\u0001\u0005\u0005\u0005\u0097\b\u0005\n\u0005\f\u0005\u009a\t\u0005\u0001"+ + "\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a1"+ + "\b\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u00a5\b\u0005\u0001\u0005"+ "\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0005\u0005"+ - "\u0093\b\u0005\n\u0005\f\u0005\u0096\t\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0003\u0005\u009d\b\u0005\u0001\u0005\u0001"+ - "\u0005\u0003\u0005\u00a1\b\u0005\u0001\u0005\u0001\u0005\u0001\u0005\u0001"+ - "\u0005\u0001\u0005\u0001\u0005\u0005\u0005\u00a9\b\u0005\n\u0005\f\u0005"+ - "\u00ac\t\u0005\u0001\u0006\u0001\u0006\u0003\u0006\u00b0\b\u0006\u0001"+ - "\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00b7"+ - "\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003\u0006\u00bc\b\u0006"+ - "\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0003\u0007"+ - "\u00c3\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003\b\u00c9\b\b\u0001"+ - "\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00d1\b\b\n\b\f\b\u00d4"+ - "\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0003\t\u00dd"+ - "\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0005\n\u00e5\b\n"+ - "\n\n\f\n\u00e8\t\n\u0003\n\u00ea\b\n\u0001\n\u0001\n\u0001\u000b\u0001"+ - "\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f\u00f4\b\f\n\f\f\f\u00f7"+ - "\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003\r\u00fe\b\r\u0001\u000e"+ - "\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e\u0104\b\u000e\n\u000e"+ - "\f\u000e\u0107\t\u000e\u0001\u000e\u0003\u000e\u010a\b\u000e\u0001\u000f"+ - "\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005\u000f\u0111\b\u000f"+ - "\n\u000f\f\u000f\u0114\t\u000f\u0001\u000f\u0001\u000f\u0001\u0010\u0001"+ - "\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011\u011d\b\u0011\u0001"+ - "\u0011\u0001\u0011\u0003\u0011\u0121\b\u0011\u0001\u0012\u0001\u0012\u0001"+ - "\u0012\u0001\u0012\u0003\u0012\u0127\b\u0012\u0001\u0013\u0001\u0013\u0001"+ - "\u0013\u0005\u0013\u012c\b\u0013\n\u0013\f\u0013\u012f\t\u0013\u0001\u0014"+ - "\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015\u0005\u0015\u0136\b\u0015"+ - "\n\u0015\f\u0015\u0139\t\u0015\u0001\u0016\u0001\u0016\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005"+ - "\u0017\u014a\b\u0017\n\u0017\f\u0017\u014d\t\u0017\u0001\u0017\u0001\u0017"+ - "\u0001\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0155\b\u0017"+ - "\n\u0017\f\u0017\u0158\t\u0017\u0001\u0017\u0001\u0017\u0001\u0017\u0001"+ - "\u0017\u0001\u0017\u0001\u0017\u0005\u0017\u0160\b\u0017\n\u0017\f\u0017"+ - "\u0163\t\u0017\u0001\u0017\u0001\u0017\u0003\u0017\u0167\b\u0017\u0001"+ - 
"\u0018\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ - "\u0019\u0005\u0019\u0170\b\u0019\n\u0019\f\u0019\u0173\t\u0019\u0001\u001a"+ - "\u0001\u001a\u0003\u001a\u0177\b\u001a\u0001\u001a\u0001\u001a\u0003\u001a"+ - "\u017b\b\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001\u001b\u0005\u001b"+ - "\u0181\b\u001b\n\u001b\f\u001b\u0184\t\u001b\u0001\u001b\u0001\u001b\u0001"+ - "\u001b\u0001\u001b\u0005\u001b\u018a\b\u001b\n\u001b\f\u001b\u018d\t\u001b"+ - "\u0003\u001b\u018f\b\u001b\u0001\u001c\u0001\u001c\u0001\u001c\u0001\u001c"+ - "\u0005\u001c\u0195\b\u001c\n\u001c\f\u001c\u0198\t\u001c\u0001\u001d\u0001"+ - "\u001d\u0001\u001d\u0001\u001d\u0005\u001d\u019e\b\u001d\n\u001d\f\u001d"+ - "\u01a1\t\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001f"+ - "\u0001\u001f\u0001\u001f\u0001\u001f\u0003\u001f\u01ab\b\u001f\u0001 "+ - "\u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001\"\u0001\"\u0001\"\u0005"+ - "\"\u01b7\b\"\n\"\f\"\u01ba\t\"\u0001#\u0001#\u0001#\u0001#\u0001$\u0001"+ - "$\u0001%\u0001%\u0003%\u01c4\b%\u0001&\u0003&\u01c7\b&\u0001&\u0001&\u0001"+ - "\'\u0003\'\u01cc\b\'\u0001\'\u0001\'\u0001(\u0001(\u0001)\u0001)\u0001"+ - "*\u0001*\u0001*\u0001+\u0001+\u0001+\u0001+\u0001,\u0001,\u0001,\u0001"+ - ",\u0003,\u01df\b,\u0001-\u0001-\u0001-\u0001-\u0003-\u01e5\b-\u0001-\u0001"+ - "-\u0001-\u0001-\u0005-\u01eb\b-\n-\f-\u01ee\t-\u0003-\u01f0\b-\u0001."+ - "\u0001.\u0001.\u0003.\u01f5\b.\u0001.\u0001.\u0001.\u0000\u0003\u0002"+ - "\n\u0010/\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010\u0012\u0014\u0016"+ - "\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPRTVXZ\\\u0000\b\u0001"+ - "\u0000<=\u0001\u0000>@\u0001\u0000LM\u0001\u0000CD\u0002\u0000 ##\u0001"+ - "\u0000&\'\u0002\u0000%%33\u0001\u00006;\u0217\u0000^\u0001\u0000\u0000"+ - "\u0000\u0002a\u0001\u0000\u0000\u0000\u0004p\u0001\u0000\u0000\u0000\u0006"+ - "\u007f\u0001\u0000\u0000\u0000\b\u0081\u0001\u0000\u0000\u0000\n\u00a0"+ - "\u0001\u0000\u0000\u0000\f\u00bb\u0001\u0000\u0000\u0000\u000e\u00c2\u0001"+ - "\u0000\u0000\u0000\u0010\u00c8\u0001\u0000\u0000\u0000\u0012\u00dc\u0001"+ - "\u0000\u0000\u0000\u0014\u00de\u0001\u0000\u0000\u0000\u0016\u00ed\u0001"+ - "\u0000\u0000\u0000\u0018\u00f0\u0001\u0000\u0000\u0000\u001a\u00fd\u0001"+ - "\u0000\u0000\u0000\u001c\u00ff\u0001\u0000\u0000\u0000\u001e\u010b\u0001"+ - "\u0000\u0000\u0000 \u0117\u0001\u0000\u0000\u0000\"\u011a\u0001\u0000"+ - "\u0000\u0000$\u0122\u0001\u0000\u0000\u0000&\u0128\u0001\u0000\u0000\u0000"+ - "(\u0130\u0001\u0000\u0000\u0000*\u0132\u0001\u0000\u0000\u0000,\u013a"+ - "\u0001\u0000\u0000\u0000.\u0166\u0001\u0000\u0000\u00000\u0168\u0001\u0000"+ - "\u0000\u00002\u016b\u0001\u0000\u0000\u00004\u0174\u0001\u0000\u0000\u0000"+ - "6\u018e\u0001\u0000\u0000\u00008\u0190\u0001\u0000\u0000\u0000:\u0199"+ - "\u0001\u0000\u0000\u0000<\u01a2\u0001\u0000\u0000\u0000>\u01a6\u0001\u0000"+ - "\u0000\u0000@\u01ac\u0001\u0000\u0000\u0000B\u01b0\u0001\u0000\u0000\u0000"+ - "D\u01b3\u0001\u0000\u0000\u0000F\u01bb\u0001\u0000\u0000\u0000H\u01bf"+ - "\u0001\u0000\u0000\u0000J\u01c3\u0001\u0000\u0000\u0000L\u01c6\u0001\u0000"+ - "\u0000\u0000N\u01cb\u0001\u0000\u0000\u0000P\u01cf\u0001\u0000\u0000\u0000"+ - "R\u01d1\u0001\u0000\u0000\u0000T\u01d3\u0001\u0000\u0000\u0000V\u01d6"+ - "\u0001\u0000\u0000\u0000X\u01de\u0001\u0000\u0000\u0000Z\u01e0\u0001\u0000"+ - "\u0000\u0000\\\u01f4\u0001\u0000\u0000\u0000^_\u0003\u0002\u0001\u0000"+ - "_`\u0005\u0000\u0000\u0001`\u0001\u0001\u0000\u0000\u0000ab\u0006\u0001"+ - 
"\uffff\uffff\u0000bc\u0003\u0004\u0002\u0000ci\u0001\u0000\u0000\u0000"+ - "de\n\u0001\u0000\u0000ef\u0005\u001a\u0000\u0000fh\u0003\u0006\u0003\u0000"+ - "gd\u0001\u0000\u0000\u0000hk\u0001\u0000\u0000\u0000ig\u0001\u0000\u0000"+ - "\u0000ij\u0001\u0000\u0000\u0000j\u0003\u0001\u0000\u0000\u0000ki\u0001"+ - "\u0000\u0000\u0000lq\u0003T*\u0000mq\u0003\u001c\u000e\u0000nq\u0003\u0016"+ - "\u000b\u0000oq\u0003X,\u0000pl\u0001\u0000\u0000\u0000pm\u0001\u0000\u0000"+ - "\u0000pn\u0001\u0000\u0000\u0000po\u0001\u0000\u0000\u0000q\u0005\u0001"+ - "\u0000\u0000\u0000r\u0080\u0003 \u0010\u0000s\u0080\u0003$\u0012\u0000"+ - "t\u0080\u00030\u0018\u0000u\u0080\u00036\u001b\u0000v\u0080\u00032\u0019"+ - "\u0000w\u0080\u0003\"\u0011\u0000x\u0080\u0003\b\u0004\u0000y\u0080\u0003"+ - "8\u001c\u0000z\u0080\u0003:\u001d\u0000{\u0080\u0003>\u001f\u0000|\u0080"+ - "\u0003@ \u0000}\u0080\u0003Z-\u0000~\u0080\u0003B!\u0000\u007fr\u0001"+ - "\u0000\u0000\u0000\u007fs\u0001\u0000\u0000\u0000\u007ft\u0001\u0000\u0000"+ - "\u0000\u007fu\u0001\u0000\u0000\u0000\u007fv\u0001\u0000\u0000\u0000\u007f"+ - "w\u0001\u0000\u0000\u0000\u007fx\u0001\u0000\u0000\u0000\u007fy\u0001"+ - "\u0000\u0000\u0000\u007fz\u0001\u0000\u0000\u0000\u007f{\u0001\u0000\u0000"+ - "\u0000\u007f|\u0001\u0000\u0000\u0000\u007f}\u0001\u0000\u0000\u0000\u007f"+ - "~\u0001\u0000\u0000\u0000\u0080\u0007\u0001\u0000\u0000\u0000\u0081\u0082"+ - "\u0005\u0012\u0000\u0000\u0082\u0083\u0003\n\u0005\u0000\u0083\t\u0001"+ - "\u0000\u0000\u0000\u0084\u0085\u0006\u0005\uffff\uffff\u0000\u0085\u0086"+ - "\u0005,\u0000\u0000\u0086\u00a1\u0003\n\u0005\u0007\u0087\u00a1\u0003"+ - "\u000e\u0007\u0000\u0088\u00a1\u0003\f\u0006\u0000\u0089\u008b\u0003\u000e"+ - "\u0007\u0000\u008a\u008c\u0005,\u0000\u0000\u008b\u008a\u0001\u0000\u0000"+ - "\u0000\u008b\u008c\u0001\u0000\u0000\u0000\u008c\u008d\u0001\u0000\u0000"+ - "\u0000\u008d\u008e\u0005)\u0000\u0000\u008e\u008f\u0005(\u0000\u0000\u008f"+ - "\u0094\u0003\u000e\u0007\u0000\u0090\u0091\u0005\"\u0000\u0000\u0091\u0093"+ - "\u0003\u000e\u0007\u0000\u0092\u0090\u0001\u0000\u0000\u0000\u0093\u0096"+ - "\u0001\u0000\u0000\u0000\u0094\u0092\u0001\u0000\u0000\u0000\u0094\u0095"+ - "\u0001\u0000\u0000\u0000\u0095\u0097\u0001\u0000\u0000\u0000\u0096\u0094"+ - "\u0001\u0000\u0000\u0000\u0097\u0098\u00052\u0000\u0000\u0098\u00a1\u0001"+ - "\u0000\u0000\u0000\u0099\u009a\u0003\u000e\u0007\u0000\u009a\u009c\u0005"+ - "*\u0000\u0000\u009b\u009d\u0005,\u0000\u0000\u009c\u009b\u0001\u0000\u0000"+ - "\u0000\u009c\u009d\u0001\u0000\u0000\u0000\u009d\u009e\u0001\u0000\u0000"+ - "\u0000\u009e\u009f\u0005-\u0000\u0000\u009f\u00a1\u0001\u0000\u0000\u0000"+ - "\u00a0\u0084\u0001\u0000\u0000\u0000\u00a0\u0087\u0001\u0000\u0000\u0000"+ - "\u00a0\u0088\u0001\u0000\u0000\u0000\u00a0\u0089\u0001\u0000\u0000\u0000"+ - "\u00a0\u0099\u0001\u0000\u0000\u0000\u00a1\u00aa\u0001\u0000\u0000\u0000"+ - "\u00a2\u00a3\n\u0004\u0000\u0000\u00a3\u00a4\u0005\u001f\u0000\u0000\u00a4"+ - "\u00a9\u0003\n\u0005\u0005\u00a5\u00a6\n\u0003\u0000\u0000\u00a6\u00a7"+ - "\u0005/\u0000\u0000\u00a7\u00a9\u0003\n\u0005\u0004\u00a8\u00a2\u0001"+ - "\u0000\u0000\u0000\u00a8\u00a5\u0001\u0000\u0000\u0000\u00a9\u00ac\u0001"+ - "\u0000\u0000\u0000\u00aa\u00a8\u0001\u0000\u0000\u0000\u00aa\u00ab\u0001"+ - "\u0000\u0000\u0000\u00ab\u000b\u0001\u0000\u0000\u0000\u00ac\u00aa\u0001"+ - "\u0000\u0000\u0000\u00ad\u00af\u0003\u000e\u0007\u0000\u00ae\u00b0\u0005"+ - ",\u0000\u0000\u00af\u00ae\u0001\u0000\u0000\u0000\u00af\u00b0\u0001\u0000"+ - 
"\u0000\u0000\u00b0\u00b1\u0001\u0000\u0000\u0000\u00b1\u00b2\u0005+\u0000"+ - "\u0000\u00b2\u00b3\u0003P(\u0000\u00b3\u00bc\u0001\u0000\u0000\u0000\u00b4"+ - "\u00b6\u0003\u000e\u0007\u0000\u00b5\u00b7\u0005,\u0000\u0000\u00b6\u00b5"+ - "\u0001\u0000\u0000\u0000\u00b6\u00b7\u0001\u0000\u0000\u0000\u00b7\u00b8"+ - "\u0001\u0000\u0000\u0000\u00b8\u00b9\u00051\u0000\u0000\u00b9\u00ba\u0003"+ - "P(\u0000\u00ba\u00bc\u0001\u0000\u0000\u0000\u00bb\u00ad\u0001\u0000\u0000"+ - "\u0000\u00bb\u00b4\u0001\u0000\u0000\u0000\u00bc\r\u0001\u0000\u0000\u0000"+ - "\u00bd\u00c3\u0003\u0010\b\u0000\u00be\u00bf\u0003\u0010\b\u0000\u00bf"+ - "\u00c0\u0003R)\u0000\u00c0\u00c1\u0003\u0010\b\u0000\u00c1\u00c3\u0001"+ - "\u0000\u0000\u0000\u00c2\u00bd\u0001\u0000\u0000\u0000\u00c2\u00be\u0001"+ - "\u0000\u0000\u0000\u00c3\u000f\u0001\u0000\u0000\u0000\u00c4\u00c5\u0006"+ - "\b\uffff\uffff\u0000\u00c5\u00c9\u0003\u0012\t\u0000\u00c6\u00c7\u0007"+ - "\u0000\u0000\u0000\u00c7\u00c9\u0003\u0010\b\u0003\u00c8\u00c4\u0001\u0000"+ - "\u0000\u0000\u00c8\u00c6\u0001\u0000\u0000\u0000\u00c9\u00d2\u0001\u0000"+ - "\u0000\u0000\u00ca\u00cb\n\u0002\u0000\u0000\u00cb\u00cc\u0007\u0001\u0000"+ - "\u0000\u00cc\u00d1\u0003\u0010\b\u0003\u00cd\u00ce\n\u0001\u0000\u0000"+ - "\u00ce\u00cf\u0007\u0000\u0000\u0000\u00cf\u00d1\u0003\u0010\b\u0002\u00d0"+ - "\u00ca\u0001\u0000\u0000\u0000\u00d0\u00cd\u0001\u0000\u0000\u0000\u00d1"+ - "\u00d4\u0001\u0000\u0000\u0000\u00d2\u00d0\u0001\u0000\u0000\u0000\u00d2"+ - "\u00d3\u0001\u0000\u0000\u0000\u00d3\u0011\u0001\u0000\u0000\u0000\u00d4"+ - "\u00d2\u0001\u0000\u0000\u0000\u00d5\u00dd\u0003.\u0017\u0000\u00d6\u00dd"+ - "\u0003*\u0015\u0000\u00d7\u00dd\u0003\u0014\n\u0000\u00d8\u00d9\u0005"+ - "(\u0000\u0000\u00d9\u00da\u0003\n\u0005\u0000\u00da\u00db\u00052\u0000"+ - "\u0000\u00db\u00dd\u0001\u0000\u0000\u0000\u00dc\u00d5\u0001\u0000\u0000"+ - "\u0000\u00dc\u00d6\u0001\u0000\u0000\u0000\u00dc\u00d7\u0001\u0000\u0000"+ - "\u0000\u00dc\u00d8\u0001\u0000\u0000\u0000\u00dd\u0013\u0001\u0000\u0000"+ - "\u0000\u00de\u00df\u0003,\u0016\u0000\u00df\u00e9\u0005(\u0000\u0000\u00e0"+ - "\u00ea\u0005>\u0000\u0000\u00e1\u00e6\u0003\n\u0005\u0000\u00e2\u00e3"+ - "\u0005\"\u0000\u0000\u00e3\u00e5\u0003\n\u0005\u0000\u00e4\u00e2\u0001"+ - "\u0000\u0000\u0000\u00e5\u00e8\u0001\u0000\u0000\u0000\u00e6\u00e4\u0001"+ - "\u0000\u0000\u0000\u00e6\u00e7\u0001\u0000\u0000\u0000\u00e7\u00ea\u0001"+ - "\u0000\u0000\u0000\u00e8\u00e6\u0001\u0000\u0000\u0000\u00e9\u00e0\u0001"+ - "\u0000\u0000\u0000\u00e9\u00e1\u0001\u0000\u0000\u0000\u00e9\u00ea\u0001"+ - "\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb\u00ec\u0005"+ - "2\u0000\u0000\u00ec\u0015\u0001\u0000\u0000\u0000\u00ed\u00ee\u0005\u000e"+ - "\u0000\u0000\u00ee\u00ef\u0003\u0018\f\u0000\u00ef\u0017\u0001\u0000\u0000"+ - "\u0000\u00f0\u00f5\u0003\u001a\r\u0000\u00f1\u00f2\u0005\"\u0000\u0000"+ - "\u00f2\u00f4\u0003\u001a\r\u0000\u00f3\u00f1\u0001\u0000\u0000\u0000\u00f4"+ - "\u00f7\u0001\u0000\u0000\u0000\u00f5\u00f3\u0001\u0000\u0000\u0000\u00f5"+ - "\u00f6\u0001\u0000\u0000\u0000\u00f6\u0019\u0001\u0000\u0000\u0000\u00f7"+ - "\u00f5\u0001\u0000\u0000\u0000\u00f8\u00fe\u0003\n\u0005\u0000\u00f9\u00fa"+ - "\u0003*\u0015\u0000\u00fa\u00fb\u0005!\u0000\u0000\u00fb\u00fc\u0003\n"+ - "\u0005\u0000\u00fc\u00fe\u0001\u0000\u0000\u0000\u00fd\u00f8\u0001\u0000"+ - "\u0000\u0000\u00fd\u00f9\u0001\u0000\u0000\u0000\u00fe\u001b\u0001\u0000"+ - "\u0000\u0000\u00ff\u0100\u0005\u0006\u0000\u0000\u0100\u0105\u0003(\u0014"+ - 
"\u0000\u0101\u0102\u0005\"\u0000\u0000\u0102\u0104\u0003(\u0014\u0000"+ - "\u0103\u0101\u0001\u0000\u0000\u0000\u0104\u0107\u0001\u0000\u0000\u0000"+ - "\u0105\u0103\u0001\u0000\u0000\u0000\u0105\u0106\u0001\u0000\u0000\u0000"+ - "\u0106\u0109\u0001\u0000\u0000\u0000\u0107\u0105\u0001\u0000\u0000\u0000"+ - "\u0108\u010a\u0003\u001e\u000f\u0000\u0109\u0108\u0001\u0000\u0000\u0000"+ - "\u0109\u010a\u0001\u0000\u0000\u0000\u010a\u001d\u0001\u0000\u0000\u0000"+ - "\u010b\u010c\u0005A\u0000\u0000\u010c\u010d\u0005I\u0000\u0000\u010d\u0112"+ - "\u0003(\u0014\u0000\u010e\u010f\u0005\"\u0000\u0000\u010f\u0111\u0003"+ - "(\u0014\u0000\u0110\u010e\u0001\u0000\u0000\u0000\u0111\u0114\u0001\u0000"+ - "\u0000\u0000\u0112\u0110\u0001\u0000\u0000\u0000\u0112\u0113\u0001\u0000"+ - "\u0000\u0000\u0113\u0115\u0001\u0000\u0000\u0000\u0114\u0112\u0001\u0000"+ - "\u0000\u0000\u0115\u0116\u0005B\u0000\u0000\u0116\u001f\u0001\u0000\u0000"+ - "\u0000\u0117\u0118\u0005\u0004\u0000\u0000\u0118\u0119\u0003\u0018\f\u0000"+ - "\u0119!\u0001\u0000\u0000\u0000\u011a\u011c\u0005\u0011\u0000\u0000\u011b"+ - "\u011d\u0003\u0018\f\u0000\u011c\u011b\u0001\u0000\u0000\u0000\u011c\u011d"+ - "\u0001\u0000\u0000\u0000\u011d\u0120\u0001\u0000\u0000\u0000\u011e\u011f"+ - "\u0005\u001e\u0000\u0000\u011f\u0121\u0003&\u0013\u0000\u0120\u011e\u0001"+ - "\u0000\u0000\u0000\u0120\u0121\u0001\u0000\u0000\u0000\u0121#\u0001\u0000"+ - "\u0000\u0000\u0122\u0123\u0005\b\u0000\u0000\u0123\u0126\u0003\u0018\f"+ - "\u0000\u0124\u0125\u0005\u001e\u0000\u0000\u0125\u0127\u0003&\u0013\u0000"+ - "\u0126\u0124\u0001\u0000\u0000\u0000\u0126\u0127\u0001\u0000\u0000\u0000"+ - "\u0127%\u0001\u0000\u0000\u0000\u0128\u012d\u0003*\u0015\u0000\u0129\u012a"+ - "\u0005\"\u0000\u0000\u012a\u012c\u0003*\u0015\u0000\u012b\u0129\u0001"+ - "\u0000\u0000\u0000\u012c\u012f\u0001\u0000\u0000\u0000\u012d\u012b\u0001"+ - "\u0000\u0000\u0000\u012d\u012e\u0001\u0000\u0000\u0000\u012e\'\u0001\u0000"+ - "\u0000\u0000\u012f\u012d\u0001\u0000\u0000\u0000\u0130\u0131\u0007\u0002"+ - "\u0000\u0000\u0131)\u0001\u0000\u0000\u0000\u0132\u0137\u0003,\u0016\u0000"+ - "\u0133\u0134\u0005$\u0000\u0000\u0134\u0136\u0003,\u0016\u0000\u0135\u0133"+ - "\u0001\u0000\u0000\u0000\u0136\u0139\u0001\u0000\u0000\u0000\u0137\u0135"+ - "\u0001\u0000\u0000\u0000\u0137\u0138\u0001\u0000\u0000\u0000\u0138+\u0001"+ - "\u0000\u0000\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013b\u0007"+ - "\u0003\u0000\u0000\u013b-\u0001\u0000\u0000\u0000\u013c\u0167\u0005-\u0000"+ - "\u0000\u013d\u013e\u0003N\'\u0000\u013e\u013f\u0005C\u0000\u0000\u013f"+ - "\u0167\u0001\u0000\u0000\u0000\u0140\u0167\u0003L&\u0000\u0141\u0167\u0003"+ - "N\'\u0000\u0142\u0167\u0003H$\u0000\u0143\u0167\u00050\u0000\u0000\u0144"+ - "\u0167\u0003P(\u0000\u0145\u0146\u0005A\u0000\u0000\u0146\u014b\u0003"+ - "J%\u0000\u0147\u0148\u0005\"\u0000\u0000\u0148\u014a\u0003J%\u0000\u0149"+ - "\u0147\u0001\u0000\u0000\u0000\u014a\u014d\u0001\u0000\u0000\u0000\u014b"+ - "\u0149\u0001\u0000\u0000\u0000\u014b\u014c\u0001\u0000\u0000\u0000\u014c"+ - "\u014e\u0001\u0000\u0000\u0000\u014d\u014b\u0001\u0000\u0000\u0000\u014e"+ - "\u014f\u0005B\u0000\u0000\u014f\u0167\u0001\u0000\u0000\u0000\u0150\u0151"+ - "\u0005A\u0000\u0000\u0151\u0156\u0003H$\u0000\u0152\u0153\u0005\"\u0000"+ - "\u0000\u0153\u0155\u0003H$\u0000\u0154\u0152\u0001\u0000\u0000\u0000\u0155"+ - "\u0158\u0001\u0000\u0000\u0000\u0156\u0154\u0001\u0000\u0000\u0000\u0156"+ - "\u0157\u0001\u0000\u0000\u0000\u0157\u0159\u0001\u0000\u0000\u0000\u0158"+ - 
"\u0156\u0001\u0000\u0000\u0000\u0159\u015a\u0005B\u0000\u0000\u015a\u0167"+ - "\u0001\u0000\u0000\u0000\u015b\u015c\u0005A\u0000\u0000\u015c\u0161\u0003"+ - "P(\u0000\u015d\u015e\u0005\"\u0000\u0000\u015e\u0160\u0003P(\u0000\u015f"+ - "\u015d\u0001\u0000\u0000\u0000\u0160\u0163\u0001\u0000\u0000\u0000\u0161"+ - "\u015f\u0001\u0000\u0000\u0000\u0161\u0162\u0001\u0000\u0000\u0000\u0162"+ - "\u0164\u0001\u0000\u0000\u0000\u0163\u0161\u0001\u0000\u0000\u0000\u0164"+ - "\u0165\u0005B\u0000\u0000\u0165\u0167\u0001\u0000\u0000\u0000\u0166\u013c"+ - "\u0001\u0000\u0000\u0000\u0166\u013d\u0001\u0000\u0000\u0000\u0166\u0140"+ - "\u0001\u0000\u0000\u0000\u0166\u0141\u0001\u0000\u0000\u0000\u0166\u0142"+ - "\u0001\u0000\u0000\u0000\u0166\u0143\u0001\u0000\u0000\u0000\u0166\u0144"+ - "\u0001\u0000\u0000\u0000\u0166\u0145\u0001\u0000\u0000\u0000\u0166\u0150"+ - "\u0001\u0000\u0000\u0000\u0166\u015b\u0001\u0000\u0000\u0000\u0167/\u0001"+ - "\u0000\u0000\u0000\u0168\u0169\u0005\n\u0000\u0000\u0169\u016a\u0005\u001c"+ - "\u0000\u0000\u016a1\u0001\u0000\u0000\u0000\u016b\u016c\u0005\u0010\u0000"+ - "\u0000\u016c\u0171\u00034\u001a\u0000\u016d\u016e\u0005\"\u0000\u0000"+ - "\u016e\u0170\u00034\u001a\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u0170"+ - "\u0173\u0001\u0000\u0000\u0000\u0171\u016f\u0001\u0000\u0000\u0000\u0171"+ - "\u0172\u0001\u0000\u0000\u0000\u01723\u0001\u0000\u0000\u0000\u0173\u0171"+ - "\u0001\u0000\u0000\u0000\u0174\u0176\u0003\n\u0005\u0000\u0175\u0177\u0007"+ - "\u0004\u0000\u0000\u0176\u0175\u0001\u0000\u0000\u0000\u0176\u0177\u0001"+ - "\u0000\u0000\u0000\u0177\u017a\u0001\u0000\u0000\u0000\u0178\u0179\u0005"+ - ".\u0000\u0000\u0179\u017b\u0007\u0005\u0000\u0000\u017a\u0178\u0001\u0000"+ - "\u0000\u0000\u017a\u017b\u0001\u0000\u0000\u0000\u017b5\u0001\u0000\u0000"+ - "\u0000\u017c\u017d\u0005\t\u0000\u0000\u017d\u0182\u0003(\u0014\u0000"+ - "\u017e\u017f\u0005\"\u0000\u0000\u017f\u0181\u0003(\u0014\u0000\u0180"+ - "\u017e\u0001\u0000\u0000\u0000\u0181\u0184\u0001\u0000\u0000\u0000\u0182"+ - "\u0180\u0001\u0000\u0000\u0000\u0182\u0183\u0001\u0000\u0000\u0000\u0183"+ - "\u018f\u0001\u0000\u0000\u0000\u0184\u0182\u0001\u0000\u0000\u0000\u0185"+ - "\u0186\u0005\f\u0000\u0000\u0186\u018b\u0003(\u0014\u0000\u0187\u0188"+ - "\u0005\"\u0000\u0000\u0188\u018a\u0003(\u0014\u0000\u0189\u0187\u0001"+ - "\u0000\u0000\u0000\u018a\u018d\u0001\u0000\u0000\u0000\u018b\u0189\u0001"+ - "\u0000\u0000\u0000\u018b\u018c\u0001\u0000\u0000\u0000\u018c\u018f\u0001"+ - "\u0000\u0000\u0000\u018d\u018b\u0001\u0000\u0000\u0000\u018e\u017c\u0001"+ - "\u0000\u0000\u0000\u018e\u0185\u0001\u0000\u0000\u0000\u018f7\u0001\u0000"+ - "\u0000\u0000\u0190\u0191\u0005\u0002\u0000\u0000\u0191\u0196\u0003(\u0014"+ - "\u0000\u0192\u0193\u0005\"\u0000\u0000\u0193\u0195\u0003(\u0014\u0000"+ - "\u0194\u0192\u0001\u0000\u0000\u0000\u0195\u0198\u0001\u0000\u0000\u0000"+ - "\u0196\u0194\u0001\u0000\u0000\u0000\u0196\u0197\u0001\u0000\u0000\u0000"+ - "\u01979\u0001\u0000\u0000\u0000\u0198\u0196\u0001\u0000\u0000\u0000\u0199"+ - "\u019a\u0005\r\u0000\u0000\u019a\u019f\u0003<\u001e\u0000\u019b\u019c"+ - "\u0005\"\u0000\u0000\u019c\u019e\u0003<\u001e\u0000\u019d\u019b\u0001"+ - "\u0000\u0000\u0000\u019e\u01a1\u0001\u0000\u0000\u0000\u019f\u019d\u0001"+ - "\u0000\u0000\u0000\u019f\u01a0\u0001\u0000\u0000\u0000\u01a0;\u0001\u0000"+ - "\u0000\u0000\u01a1\u019f\u0001\u0000\u0000\u0000\u01a2\u01a3\u0003(\u0014"+ - "\u0000\u01a3\u01a4\u0005H\u0000\u0000\u01a4\u01a5\u0003(\u0014\u0000\u01a5"+ - 
"=\u0001\u0000\u0000\u0000\u01a6\u01a7\u0005\u0001\u0000\u0000\u01a7\u01a8"+ - "\u0003\u0012\t\u0000\u01a8\u01aa\u0003P(\u0000\u01a9\u01ab\u0003D\"\u0000"+ - "\u01aa\u01a9\u0001\u0000\u0000\u0000\u01aa\u01ab\u0001\u0000\u0000\u0000"+ - "\u01ab?\u0001\u0000\u0000\u0000\u01ac\u01ad\u0005\u0007\u0000\u0000\u01ad"+ - "\u01ae\u0003\u0012\t\u0000\u01ae\u01af\u0003P(\u0000\u01afA\u0001\u0000"+ - "\u0000\u0000\u01b0\u01b1\u0005\u000b\u0000\u0000\u01b1\u01b2\u0003(\u0014"+ - "\u0000\u01b2C\u0001\u0000\u0000\u0000\u01b3\u01b8\u0003F#\u0000\u01b4"+ - "\u01b5\u0005\"\u0000\u0000\u01b5\u01b7\u0003F#\u0000\u01b6\u01b4\u0001"+ - "\u0000\u0000\u0000\u01b7\u01ba\u0001\u0000\u0000\u0000\u01b8\u01b6\u0001"+ - "\u0000\u0000\u0000\u01b8\u01b9\u0001\u0000\u0000\u0000\u01b9E\u0001\u0000"+ - "\u0000\u0000\u01ba\u01b8\u0001\u0000\u0000\u0000\u01bb\u01bc\u0003,\u0016"+ - "\u0000\u01bc\u01bd\u0005!\u0000\u0000\u01bd\u01be\u0003.\u0017\u0000\u01be"+ - "G\u0001\u0000\u0000\u0000\u01bf\u01c0\u0007\u0006\u0000\u0000\u01c0I\u0001"+ - "\u0000\u0000\u0000\u01c1\u01c4\u0003L&\u0000\u01c2\u01c4\u0003N\'\u0000"+ - "\u01c3\u01c1\u0001\u0000\u0000\u0000\u01c3\u01c2\u0001\u0000\u0000\u0000"+ - "\u01c4K\u0001\u0000\u0000\u0000\u01c5\u01c7\u0007\u0000\u0000\u0000\u01c6"+ - "\u01c5\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+ - "\u01c8\u0001\u0000\u0000\u0000\u01c8\u01c9\u0005\u001d\u0000\u0000\u01c9"+ - "M\u0001\u0000\u0000\u0000\u01ca\u01cc\u0007\u0000\u0000\u0000\u01cb\u01ca"+ - "\u0001\u0000\u0000\u0000\u01cb\u01cc\u0001\u0000\u0000\u0000\u01cc\u01cd"+ - "\u0001\u0000\u0000\u0000\u01cd\u01ce\u0005\u001c\u0000\u0000\u01ceO\u0001"+ - "\u0000\u0000\u0000\u01cf\u01d0\u0005\u001b\u0000\u0000\u01d0Q\u0001\u0000"+ - "\u0000\u0000\u01d1\u01d2\u0007\u0007\u0000\u0000\u01d2S\u0001\u0000\u0000"+ - "\u0000\u01d3\u01d4\u0005\u0005\u0000\u0000\u01d4\u01d5\u0003V+\u0000\u01d5"+ - "U\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005A\u0000\u0000\u01d7\u01d8\u0003"+ - "\u0002\u0001\u0000\u01d8\u01d9\u0005B\u0000\u0000\u01d9W\u0001\u0000\u0000"+ - "\u0000\u01da\u01db\u0005\u000f\u0000\u0000\u01db\u01df\u00054\u0000\u0000"+ - "\u01dc\u01dd\u0005\u000f\u0000\u0000\u01dd\u01df\u00055\u0000\u0000\u01de"+ - "\u01da\u0001\u0000\u0000\u0000\u01de\u01dc\u0001\u0000\u0000\u0000\u01df"+ - "Y\u0001\u0000\u0000\u0000\u01e0\u01e1\u0005\u0003\u0000\u0000\u01e1\u01e4"+ - "\u0003(\u0014\u0000\u01e2\u01e3\u0005J\u0000\u0000\u01e3\u01e5\u0003("+ - "\u0014\u0000\u01e4\u01e2\u0001\u0000\u0000\u0000\u01e4\u01e5\u0001\u0000"+ - "\u0000\u0000\u01e5\u01ef\u0001\u0000\u0000\u0000\u01e6\u01e7\u0005K\u0000"+ - "\u0000\u01e7\u01ec\u0003\\.\u0000\u01e8\u01e9\u0005\"\u0000\u0000\u01e9"+ - "\u01eb\u0003\\.\u0000\u01ea\u01e8\u0001\u0000\u0000\u0000\u01eb\u01ee"+ - "\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001\u0000\u0000\u0000\u01ec\u01ed"+ - "\u0001\u0000\u0000\u0000\u01ed\u01f0\u0001\u0000\u0000\u0000\u01ee\u01ec"+ - "\u0001\u0000\u0000\u0000\u01ef\u01e6\u0001\u0000\u0000\u0000\u01ef\u01f0"+ - "\u0001\u0000\u0000\u0000\u01f0[\u0001\u0000\u0000\u0000\u01f1\u01f2\u0003"+ - "(\u0014\u0000\u01f2\u01f3\u0005!\u0000\u0000\u01f3\u01f5\u0001\u0000\u0000"+ - "\u0000\u01f4\u01f1\u0001\u0000\u0000\u0000\u01f4\u01f5\u0001\u0000\u0000"+ - "\u0000\u01f5\u01f6\u0001\u0000\u0000\u0000\u01f6\u01f7\u0003(\u0014\u0000"+ - "\u01f7]\u0001\u0000\u0000\u00003ip\u007f\u008b\u0094\u009c\u00a0\u00a8"+ - "\u00aa\u00af\u00b6\u00bb\u00c2\u00c8\u00d0\u00d2\u00dc\u00e6\u00e9\u00f5"+ - "\u00fd\u0105\u0109\u0112\u011c\u0120\u0126\u012d\u0137\u014b\u0156\u0161"+ - 
"\u0166\u0171\u0176\u017a\u0182\u018b\u018e\u0196\u019f\u01aa\u01b8\u01c3"+ - "\u01c6\u01cb\u01de\u01e4\u01ec\u01ef\u01f4"; + "\u00ad\b\u0005\n\u0005\f\u0005\u00b0\t\u0005\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00b4\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0001"+ + "\u0006\u0003\u0006\u00bb\b\u0006\u0001\u0006\u0001\u0006\u0001\u0006\u0003"+ + "\u0006\u00c0\b\u0006\u0001\u0007\u0001\u0007\u0001\u0007\u0001\u0007\u0001"+ + "\u0007\u0003\u0007\u00c7\b\u0007\u0001\b\u0001\b\u0001\b\u0001\b\u0003"+ + "\b\u00cd\b\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0001\b\u0005\b\u00d5"+ + "\b\b\n\b\f\b\u00d8\t\b\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t\u0001\t"+ + "\u0001\t\u0003\t\u00e1\b\t\u0001\n\u0001\n\u0001\n\u0001\n\u0001\n\u0001"+ + "\n\u0005\n\u00e9\b\n\n\n\f\n\u00ec\t\n\u0003\n\u00ee\b\n\u0001\n\u0001"+ + "\n\u0001\u000b\u0001\u000b\u0001\u000b\u0001\f\u0001\f\u0001\f\u0005\f"+ + "\u00f8\b\f\n\f\f\f\u00fb\t\f\u0001\r\u0001\r\u0001\r\u0001\r\u0001\r\u0003"+ + "\r\u0102\b\r\u0001\u000e\u0001\u000e\u0001\u000e\u0001\u000e\u0005\u000e"+ + "\u0108\b\u000e\n\u000e\f\u000e\u010b\t\u000e\u0001\u000e\u0003\u000e\u010e"+ + "\b\u000e\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0001\u000f\u0005"+ + "\u000f\u0115\b\u000f\n\u000f\f\u000f\u0118\t\u000f\u0001\u000f\u0001\u000f"+ + "\u0001\u0010\u0001\u0010\u0001\u0010\u0001\u0011\u0001\u0011\u0003\u0011"+ + "\u0121\b\u0011\u0001\u0011\u0001\u0011\u0003\u0011\u0125\b\u0011\u0001"+ + "\u0012\u0001\u0012\u0001\u0012\u0001\u0012\u0003\u0012\u012b\b\u0012\u0001"+ + "\u0013\u0001\u0013\u0001\u0013\u0005\u0013\u0130\b\u0013\n\u0013\f\u0013"+ + "\u0133\t\u0013\u0001\u0014\u0001\u0014\u0001\u0015\u0001\u0015\u0001\u0015"+ + "\u0005\u0015\u013a\b\u0015\n\u0015\f\u0015\u013d\t\u0015\u0001\u0016\u0001"+ + "\u0016\u0001\u0016\u0005\u0016\u0142\b\u0016\n\u0016\f\u0016\u0145\t\u0016"+ + "\u0001\u0017\u0001\u0017\u0001\u0018\u0001\u0018\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019"+ + "\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019"+ + "\u0158\b\u0019\n\u0019\f\u0019\u015b\t\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u0163\b\u0019\n"+ + "\u0019\f\u0019\u0166\t\u0019\u0001\u0019\u0001\u0019\u0001\u0019\u0001"+ + "\u0019\u0001\u0019\u0001\u0019\u0005\u0019\u016e\b\u0019\n\u0019\f\u0019"+ + "\u0171\t\u0019\u0001\u0019\u0001\u0019\u0003\u0019\u0175\b\u0019\u0001"+ + "\u001a\u0001\u001a\u0001\u001a\u0001\u001b\u0001\u001b\u0001\u001b\u0001"+ + "\u001b\u0005\u001b\u017e\b\u001b\n\u001b\f\u001b\u0181\t\u001b\u0001\u001c"+ + "\u0001\u001c\u0003\u001c\u0185\b\u001c\u0001\u001c\u0001\u001c\u0003\u001c"+ + "\u0189\b\u001c\u0001\u001d\u0001\u001d\u0001\u001d\u0001\u001d\u0005\u001d"+ + "\u018f\b\u001d\n\u001d\f\u001d\u0192\t\u001d\u0001\u001d\u0001\u001d\u0001"+ + "\u001d\u0001\u001d\u0005\u001d\u0198\b\u001d\n\u001d\f\u001d\u019b\t\u001d"+ + "\u0003\u001d\u019d\b\u001d\u0001\u001e\u0001\u001e\u0001\u001e\u0001\u001e"+ + "\u0005\u001e\u01a3\b\u001e\n\u001e\f\u001e\u01a6\t\u001e\u0001\u001f\u0001"+ + "\u001f\u0001\u001f\u0001\u001f\u0005\u001f\u01ac\b\u001f\n\u001f\f\u001f"+ + "\u01af\t\u001f\u0001 \u0001 \u0001 \u0001 \u0001!\u0001!\u0001!\u0001"+ + "!\u0003!\u01b9\b!\u0001\"\u0001\"\u0001\"\u0001\"\u0001#\u0001#\u0001"+ + "#\u0001$\u0001$\u0001$\u0005$\u01c5\b$\n$\f$\u01c8\t$\u0001%\u0001%\u0001"+ + "%\u0001%\u0001&\u0001&\u0001\'\u0001\'\u0003\'\u01d2\b\'\u0001(\u0003"+ + 
"(\u01d5\b(\u0001(\u0001(\u0001)\u0003)\u01da\b)\u0001)\u0001)\u0001*\u0001"+ + "*\u0001+\u0001+\u0001,\u0001,\u0001,\u0001-\u0001-\u0001-\u0001-\u0001"+ + ".\u0001.\u0001.\u0001.\u0003.\u01ed\b.\u0001/\u0001/\u0001/\u0001/\u0003"+ + "/\u01f3\b/\u0001/\u0001/\u0001/\u0001/\u0005/\u01f9\b/\n/\f/\u01fc\t/"+ + "\u0003/\u01fe\b/\u00010\u00010\u00010\u00030\u0203\b0\u00010\u00010\u0001"+ + "0\u0000\u0003\u0002\n\u00101\u0000\u0002\u0004\u0006\b\n\f\u000e\u0010"+ + "\u0012\u0014\u0016\u0018\u001a\u001c\u001e \"$&(*,.02468:<>@BDFHJLNPR"+ + "TVXZ\\^`\u0000\t\u0001\u0000:;\u0001\u0000<>\u0002\u0000BBGG\u0001\u0000"+ + "AB\u0002\u0000BBKK\u0002\u0000 ##\u0001\u0000&\'\u0002\u0000%%33\u0001"+ + "\u000049\u0224\u0000b\u0001\u0000\u0000\u0000\u0002e\u0001\u0000\u0000"+ + "\u0000\u0004t\u0001\u0000\u0000\u0000\u0006\u0083\u0001\u0000\u0000\u0000"+ + "\b\u0085\u0001\u0000\u0000\u0000\n\u00a4\u0001\u0000\u0000\u0000\f\u00bf"+ + "\u0001\u0000\u0000\u0000\u000e\u00c6\u0001\u0000\u0000\u0000\u0010\u00cc"+ + "\u0001\u0000\u0000\u0000\u0012\u00e0\u0001\u0000\u0000\u0000\u0014\u00e2"+ + "\u0001\u0000\u0000\u0000\u0016\u00f1\u0001\u0000\u0000\u0000\u0018\u00f4"+ + "\u0001\u0000\u0000\u0000\u001a\u0101\u0001\u0000\u0000\u0000\u001c\u0103"+ + "\u0001\u0000\u0000\u0000\u001e\u010f\u0001\u0000\u0000\u0000 \u011b\u0001"+ + "\u0000\u0000\u0000\"\u011e\u0001\u0000\u0000\u0000$\u0126\u0001\u0000"+ + "\u0000\u0000&\u012c\u0001\u0000\u0000\u0000(\u0134\u0001\u0000\u0000\u0000"+ + "*\u0136\u0001\u0000\u0000\u0000,\u013e\u0001\u0000\u0000\u0000.\u0146"+ + "\u0001\u0000\u0000\u00000\u0148\u0001\u0000\u0000\u00002\u0174\u0001\u0000"+ + "\u0000\u00004\u0176\u0001\u0000\u0000\u00006\u0179\u0001\u0000\u0000\u0000"+ + "8\u0182\u0001\u0000\u0000\u0000:\u019c\u0001\u0000\u0000\u0000<\u019e"+ + "\u0001\u0000\u0000\u0000>\u01a7\u0001\u0000\u0000\u0000@\u01b0\u0001\u0000"+ + "\u0000\u0000B\u01b4\u0001\u0000\u0000\u0000D\u01ba\u0001\u0000\u0000\u0000"+ + "F\u01be\u0001\u0000\u0000\u0000H\u01c1\u0001\u0000\u0000\u0000J\u01c9"+ + "\u0001\u0000\u0000\u0000L\u01cd\u0001\u0000\u0000\u0000N\u01d1\u0001\u0000"+ + "\u0000\u0000P\u01d4\u0001\u0000\u0000\u0000R\u01d9\u0001\u0000\u0000\u0000"+ + "T\u01dd\u0001\u0000\u0000\u0000V\u01df\u0001\u0000\u0000\u0000X\u01e1"+ + "\u0001\u0000\u0000\u0000Z\u01e4\u0001\u0000\u0000\u0000\\\u01ec\u0001"+ + "\u0000\u0000\u0000^\u01ee\u0001\u0000\u0000\u0000`\u0202\u0001\u0000\u0000"+ + "\u0000bc\u0003\u0002\u0001\u0000cd\u0005\u0000\u0000\u0001d\u0001\u0001"+ + "\u0000\u0000\u0000ef\u0006\u0001\uffff\uffff\u0000fg\u0003\u0004\u0002"+ + "\u0000gm\u0001\u0000\u0000\u0000hi\n\u0001\u0000\u0000ij\u0005\u001a\u0000"+ + "\u0000jl\u0003\u0006\u0003\u0000kh\u0001\u0000\u0000\u0000lo\u0001\u0000"+ + "\u0000\u0000mk\u0001\u0000\u0000\u0000mn\u0001\u0000\u0000\u0000n\u0003"+ + "\u0001\u0000\u0000\u0000om\u0001\u0000\u0000\u0000pu\u0003X,\u0000qu\u0003"+ + "\u001c\u000e\u0000ru\u0003\u0016\u000b\u0000su\u0003\\.\u0000tp\u0001"+ + "\u0000\u0000\u0000tq\u0001\u0000\u0000\u0000tr\u0001\u0000\u0000\u0000"+ + "ts\u0001\u0000\u0000\u0000u\u0005\u0001\u0000\u0000\u0000v\u0084\u0003"+ + " \u0010\u0000w\u0084\u0003$\u0012\u0000x\u0084\u00034\u001a\u0000y\u0084"+ + "\u0003:\u001d\u0000z\u0084\u00036\u001b\u0000{\u0084\u0003\"\u0011\u0000"+ + "|\u0084\u0003\b\u0004\u0000}\u0084\u0003<\u001e\u0000~\u0084\u0003>\u001f"+ + "\u0000\u007f\u0084\u0003B!\u0000\u0080\u0084\u0003D\"\u0000\u0081\u0084"+ + "\u0003^/\u0000\u0082\u0084\u0003F#\u0000\u0083v\u0001\u0000\u0000\u0000"+ + 
"\u0083w\u0001\u0000\u0000\u0000\u0083x\u0001\u0000\u0000\u0000\u0083y"+ + "\u0001\u0000\u0000\u0000\u0083z\u0001\u0000\u0000\u0000\u0083{\u0001\u0000"+ + "\u0000\u0000\u0083|\u0001\u0000\u0000\u0000\u0083}\u0001\u0000\u0000\u0000"+ + "\u0083~\u0001\u0000\u0000\u0000\u0083\u007f\u0001\u0000\u0000\u0000\u0083"+ + "\u0080\u0001\u0000\u0000\u0000\u0083\u0081\u0001\u0000\u0000\u0000\u0083"+ + "\u0082\u0001\u0000\u0000\u0000\u0084\u0007\u0001\u0000\u0000\u0000\u0085"+ + "\u0086\u0005\u0012\u0000\u0000\u0086\u0087\u0003\n\u0005\u0000\u0087\t"+ + "\u0001\u0000\u0000\u0000\u0088\u0089\u0006\u0005\uffff\uffff\u0000\u0089"+ + "\u008a\u0005,\u0000\u0000\u008a\u00a5\u0003\n\u0005\u0007\u008b\u00a5"+ + "\u0003\u000e\u0007\u0000\u008c\u00a5\u0003\f\u0006\u0000\u008d\u008f\u0003"+ + "\u000e\u0007\u0000\u008e\u0090\u0005,\u0000\u0000\u008f\u008e\u0001\u0000"+ + "\u0000\u0000\u008f\u0090\u0001\u0000\u0000\u0000\u0090\u0091\u0001\u0000"+ + "\u0000\u0000\u0091\u0092\u0005)\u0000\u0000\u0092\u0093\u0005(\u0000\u0000"+ + "\u0093\u0098\u0003\u000e\u0007\u0000\u0094\u0095\u0005\"\u0000\u0000\u0095"+ + "\u0097\u0003\u000e\u0007\u0000\u0096\u0094\u0001\u0000\u0000\u0000\u0097"+ + "\u009a\u0001\u0000\u0000\u0000\u0098\u0096\u0001\u0000\u0000\u0000\u0098"+ + "\u0099\u0001\u0000\u0000\u0000\u0099\u009b\u0001\u0000\u0000\u0000\u009a"+ + "\u0098\u0001\u0000\u0000\u0000\u009b\u009c\u00052\u0000\u0000\u009c\u00a5"+ + "\u0001\u0000\u0000\u0000\u009d\u009e\u0003\u000e\u0007\u0000\u009e\u00a0"+ + "\u0005*\u0000\u0000\u009f\u00a1\u0005,\u0000\u0000\u00a0\u009f\u0001\u0000"+ + "\u0000\u0000\u00a0\u00a1\u0001\u0000\u0000\u0000\u00a1\u00a2\u0001\u0000"+ + "\u0000\u0000\u00a2\u00a3\u0005-\u0000\u0000\u00a3\u00a5\u0001\u0000\u0000"+ + "\u0000\u00a4\u0088\u0001\u0000\u0000\u0000\u00a4\u008b\u0001\u0000\u0000"+ + "\u0000\u00a4\u008c\u0001\u0000\u0000\u0000\u00a4\u008d\u0001\u0000\u0000"+ + "\u0000\u00a4\u009d\u0001\u0000\u0000\u0000\u00a5\u00ae\u0001\u0000\u0000"+ + "\u0000\u00a6\u00a7\n\u0004\u0000\u0000\u00a7\u00a8\u0005\u001f\u0000\u0000"+ + "\u00a8\u00ad\u0003\n\u0005\u0005\u00a9\u00aa\n\u0003\u0000\u0000\u00aa"+ + "\u00ab\u0005/\u0000\u0000\u00ab\u00ad\u0003\n\u0005\u0004\u00ac\u00a6"+ + "\u0001\u0000\u0000\u0000\u00ac\u00a9\u0001\u0000\u0000\u0000\u00ad\u00b0"+ + "\u0001\u0000\u0000\u0000\u00ae\u00ac\u0001\u0000\u0000\u0000\u00ae\u00af"+ + "\u0001\u0000\u0000\u0000\u00af\u000b\u0001\u0000\u0000\u0000\u00b0\u00ae"+ + "\u0001\u0000\u0000\u0000\u00b1\u00b3\u0003\u000e\u0007\u0000\u00b2\u00b4"+ + "\u0005,\u0000\u0000\u00b3\u00b2\u0001\u0000\u0000\u0000\u00b3\u00b4\u0001"+ + "\u0000\u0000\u0000\u00b4\u00b5\u0001\u0000\u0000\u0000\u00b5\u00b6\u0005"+ + "+\u0000\u0000\u00b6\u00b7\u0003T*\u0000\u00b7\u00c0\u0001\u0000\u0000"+ + "\u0000\u00b8\u00ba\u0003\u000e\u0007\u0000\u00b9\u00bb\u0005,\u0000\u0000"+ + "\u00ba\u00b9\u0001\u0000\u0000\u0000\u00ba\u00bb\u0001\u0000\u0000\u0000"+ + "\u00bb\u00bc\u0001\u0000\u0000\u0000\u00bc\u00bd\u00051\u0000\u0000\u00bd"+ + "\u00be\u0003T*\u0000\u00be\u00c0\u0001\u0000\u0000\u0000\u00bf\u00b1\u0001"+ + "\u0000\u0000\u0000\u00bf\u00b8\u0001\u0000\u0000\u0000\u00c0\r\u0001\u0000"+ + "\u0000\u0000\u00c1\u00c7\u0003\u0010\b\u0000\u00c2\u00c3\u0003\u0010\b"+ + "\u0000\u00c3\u00c4\u0003V+\u0000\u00c4\u00c5\u0003\u0010\b\u0000\u00c5"+ + "\u00c7\u0001\u0000\u0000\u0000\u00c6\u00c1\u0001\u0000\u0000\u0000\u00c6"+ + "\u00c2\u0001\u0000\u0000\u0000\u00c7\u000f\u0001\u0000\u0000\u0000\u00c8"+ + "\u00c9\u0006\b\uffff\uffff\u0000\u00c9\u00cd\u0003\u0012\t\u0000\u00ca"+ + 
"\u00cb\u0007\u0000\u0000\u0000\u00cb\u00cd\u0003\u0010\b\u0003\u00cc\u00c8"+ + "\u0001\u0000\u0000\u0000\u00cc\u00ca\u0001\u0000\u0000\u0000\u00cd\u00d6"+ + "\u0001\u0000\u0000\u0000\u00ce\u00cf\n\u0002\u0000\u0000\u00cf\u00d0\u0007"+ + "\u0001\u0000\u0000\u00d0\u00d5\u0003\u0010\b\u0003\u00d1\u00d2\n\u0001"+ + "\u0000\u0000\u00d2\u00d3\u0007\u0000\u0000\u0000\u00d3\u00d5\u0003\u0010"+ + "\b\u0002\u00d4\u00ce\u0001\u0000\u0000\u0000\u00d4\u00d1\u0001\u0000\u0000"+ + "\u0000\u00d5\u00d8\u0001\u0000\u0000\u0000\u00d6\u00d4\u0001\u0000\u0000"+ + "\u0000\u00d6\u00d7\u0001\u0000\u0000\u0000\u00d7\u0011\u0001\u0000\u0000"+ + "\u0000\u00d8\u00d6\u0001\u0000\u0000\u0000\u00d9\u00e1\u00032\u0019\u0000"+ + "\u00da\u00e1\u0003*\u0015\u0000\u00db\u00e1\u0003\u0014\n\u0000\u00dc"+ + "\u00dd\u0005(\u0000\u0000\u00dd\u00de\u0003\n\u0005\u0000\u00de\u00df"+ + "\u00052\u0000\u0000\u00df\u00e1\u0001\u0000\u0000\u0000\u00e0\u00d9\u0001"+ + "\u0000\u0000\u0000\u00e0\u00da\u0001\u0000\u0000\u0000\u00e0\u00db\u0001"+ + "\u0000\u0000\u0000\u00e0\u00dc\u0001\u0000\u0000\u0000\u00e1\u0013\u0001"+ + "\u0000\u0000\u0000\u00e2\u00e3\u0003.\u0017\u0000\u00e3\u00ed\u0005(\u0000"+ + "\u0000\u00e4\u00ee\u0005<\u0000\u0000\u00e5\u00ea\u0003\n\u0005\u0000"+ + "\u00e6\u00e7\u0005\"\u0000\u0000\u00e7\u00e9\u0003\n\u0005\u0000\u00e8"+ + "\u00e6\u0001\u0000\u0000\u0000\u00e9\u00ec\u0001\u0000\u0000\u0000\u00ea"+ + "\u00e8\u0001\u0000\u0000\u0000\u00ea\u00eb\u0001\u0000\u0000\u0000\u00eb"+ + "\u00ee\u0001\u0000\u0000\u0000\u00ec\u00ea\u0001\u0000\u0000\u0000\u00ed"+ + "\u00e4\u0001\u0000\u0000\u0000\u00ed\u00e5\u0001\u0000\u0000\u0000\u00ed"+ + "\u00ee\u0001\u0000\u0000\u0000\u00ee\u00ef\u0001\u0000\u0000\u0000\u00ef"+ + "\u00f0\u00052\u0000\u0000\u00f0\u0015\u0001\u0000\u0000\u0000\u00f1\u00f2"+ + "\u0005\u000e\u0000\u0000\u00f2\u00f3\u0003\u0018\f\u0000\u00f3\u0017\u0001"+ + "\u0000\u0000\u0000\u00f4\u00f9\u0003\u001a\r\u0000\u00f5\u00f6\u0005\""+ + "\u0000\u0000\u00f6\u00f8\u0003\u001a\r\u0000\u00f7\u00f5\u0001\u0000\u0000"+ + "\u0000\u00f8\u00fb\u0001\u0000\u0000\u0000\u00f9\u00f7\u0001\u0000\u0000"+ + "\u0000\u00f9\u00fa\u0001\u0000\u0000\u0000\u00fa\u0019\u0001\u0000\u0000"+ + "\u0000\u00fb\u00f9\u0001\u0000\u0000\u0000\u00fc\u0102\u0003\n\u0005\u0000"+ + "\u00fd\u00fe\u0003*\u0015\u0000\u00fe\u00ff\u0005!\u0000\u0000\u00ff\u0100"+ + "\u0003\n\u0005\u0000\u0100\u0102\u0001\u0000\u0000\u0000\u0101\u00fc\u0001"+ + "\u0000\u0000\u0000\u0101\u00fd\u0001\u0000\u0000\u0000\u0102\u001b\u0001"+ + "\u0000\u0000\u0000\u0103\u0104\u0005\u0006\u0000\u0000\u0104\u0109\u0003"+ + "(\u0014\u0000\u0105\u0106\u0005\"\u0000\u0000\u0106\u0108\u0003(\u0014"+ + "\u0000\u0107\u0105\u0001\u0000\u0000\u0000\u0108\u010b\u0001\u0000\u0000"+ + "\u0000\u0109\u0107\u0001\u0000\u0000\u0000\u0109\u010a\u0001\u0000\u0000"+ + "\u0000\u010a\u010d\u0001\u0000\u0000\u0000\u010b\u0109\u0001\u0000\u0000"+ + "\u0000\u010c\u010e\u0003\u001e\u000f\u0000\u010d\u010c\u0001\u0000\u0000"+ + "\u0000\u010d\u010e\u0001\u0000\u0000\u0000\u010e\u001d\u0001\u0000\u0000"+ + "\u0000\u010f\u0110\u0005?\u0000\u0000\u0110\u0111\u0005F\u0000\u0000\u0111"+ + "\u0116\u0003(\u0014\u0000\u0112\u0113\u0005\"\u0000\u0000\u0113\u0115"+ + "\u0003(\u0014\u0000\u0114\u0112\u0001\u0000\u0000\u0000\u0115\u0118\u0001"+ + "\u0000\u0000\u0000\u0116\u0114\u0001\u0000\u0000\u0000\u0116\u0117\u0001"+ + "\u0000\u0000\u0000\u0117\u0119\u0001\u0000\u0000\u0000\u0118\u0116\u0001"+ + "\u0000\u0000\u0000\u0119\u011a\u0005@\u0000\u0000\u011a\u001f\u0001\u0000"+ + 
"\u0000\u0000\u011b\u011c\u0005\u0004\u0000\u0000\u011c\u011d\u0003\u0018"+ + "\f\u0000\u011d!\u0001\u0000\u0000\u0000\u011e\u0120\u0005\u0011\u0000"+ + "\u0000\u011f\u0121\u0003\u0018\f\u0000\u0120\u011f\u0001\u0000\u0000\u0000"+ + "\u0120\u0121\u0001\u0000\u0000\u0000\u0121\u0124\u0001\u0000\u0000\u0000"+ + "\u0122\u0123\u0005\u001e\u0000\u0000\u0123\u0125\u0003&\u0013\u0000\u0124"+ + "\u0122\u0001\u0000\u0000\u0000\u0124\u0125\u0001\u0000\u0000\u0000\u0125"+ + "#\u0001\u0000\u0000\u0000\u0126\u0127\u0005\b\u0000\u0000\u0127\u012a"+ + "\u0003\u0018\f\u0000\u0128\u0129\u0005\u001e\u0000\u0000\u0129\u012b\u0003"+ + "&\u0013\u0000\u012a\u0128\u0001\u0000\u0000\u0000\u012a\u012b\u0001\u0000"+ + "\u0000\u0000\u012b%\u0001\u0000\u0000\u0000\u012c\u0131\u0003*\u0015\u0000"+ + "\u012d\u012e\u0005\"\u0000\u0000\u012e\u0130\u0003*\u0015\u0000\u012f"+ + "\u012d\u0001\u0000\u0000\u0000\u0130\u0133\u0001\u0000\u0000\u0000\u0131"+ + "\u012f\u0001\u0000\u0000\u0000\u0131\u0132\u0001\u0000\u0000\u0000\u0132"+ + "\'\u0001\u0000\u0000\u0000\u0133\u0131\u0001\u0000\u0000\u0000\u0134\u0135"+ + "\u0007\u0002\u0000\u0000\u0135)\u0001\u0000\u0000\u0000\u0136\u013b\u0003"+ + ".\u0017\u0000\u0137\u0138\u0005$\u0000\u0000\u0138\u013a\u0003.\u0017"+ + "\u0000\u0139\u0137\u0001\u0000\u0000\u0000\u013a\u013d\u0001\u0000\u0000"+ + "\u0000\u013b\u0139\u0001\u0000\u0000\u0000\u013b\u013c\u0001\u0000\u0000"+ + "\u0000\u013c+\u0001\u0000\u0000\u0000\u013d\u013b\u0001\u0000\u0000\u0000"+ + "\u013e\u0143\u00030\u0018\u0000\u013f\u0140\u0005$\u0000\u0000\u0140\u0142"+ + "\u00030\u0018\u0000\u0141\u013f\u0001\u0000\u0000\u0000\u0142\u0145\u0001"+ + "\u0000\u0000\u0000\u0143\u0141\u0001\u0000\u0000\u0000\u0143\u0144\u0001"+ + "\u0000\u0000\u0000\u0144-\u0001\u0000\u0000\u0000\u0145\u0143\u0001\u0000"+ + "\u0000\u0000\u0146\u0147\u0007\u0003\u0000\u0000\u0147/\u0001\u0000\u0000"+ + "\u0000\u0148\u0149\u0007\u0004\u0000\u0000\u01491\u0001\u0000\u0000\u0000"+ + "\u014a\u0175\u0005-\u0000\u0000\u014b\u014c\u0003R)\u0000\u014c\u014d"+ + "\u0005A\u0000\u0000\u014d\u0175\u0001\u0000\u0000\u0000\u014e\u0175\u0003"+ + "P(\u0000\u014f\u0175\u0003R)\u0000\u0150\u0175\u0003L&\u0000\u0151\u0175"+ + "\u00050\u0000\u0000\u0152\u0175\u0003T*\u0000\u0153\u0154\u0005?\u0000"+ + "\u0000\u0154\u0159\u0003N\'\u0000\u0155\u0156\u0005\"\u0000\u0000\u0156"+ + "\u0158\u0003N\'\u0000\u0157\u0155\u0001\u0000\u0000\u0000\u0158\u015b"+ + "\u0001\u0000\u0000\u0000\u0159\u0157\u0001\u0000\u0000\u0000\u0159\u015a"+ + "\u0001\u0000\u0000\u0000\u015a\u015c\u0001\u0000\u0000\u0000\u015b\u0159"+ + "\u0001\u0000\u0000\u0000\u015c\u015d\u0005@\u0000\u0000\u015d\u0175\u0001"+ + "\u0000\u0000\u0000\u015e\u015f\u0005?\u0000\u0000\u015f\u0164\u0003L&"+ + "\u0000\u0160\u0161\u0005\"\u0000\u0000\u0161\u0163\u0003L&\u0000\u0162"+ + "\u0160\u0001\u0000\u0000\u0000\u0163\u0166\u0001\u0000\u0000\u0000\u0164"+ + "\u0162\u0001\u0000\u0000\u0000\u0164\u0165\u0001\u0000\u0000\u0000\u0165"+ + "\u0167\u0001\u0000\u0000\u0000\u0166\u0164\u0001\u0000\u0000\u0000\u0167"+ + "\u0168\u0005@\u0000\u0000\u0168\u0175\u0001\u0000\u0000\u0000\u0169\u016a"+ + "\u0005?\u0000\u0000\u016a\u016f\u0003T*\u0000\u016b\u016c\u0005\"\u0000"+ + "\u0000\u016c\u016e\u0003T*\u0000\u016d\u016b\u0001\u0000\u0000\u0000\u016e"+ + "\u0171\u0001\u0000\u0000\u0000\u016f\u016d\u0001\u0000\u0000\u0000\u016f"+ + "\u0170\u0001\u0000\u0000\u0000\u0170\u0172\u0001\u0000\u0000\u0000\u0171"+ + "\u016f\u0001\u0000\u0000\u0000\u0172\u0173\u0005@\u0000\u0000\u0173\u0175"+ + 
"\u0001\u0000\u0000\u0000\u0174\u014a\u0001\u0000\u0000\u0000\u0174\u014b"+ + "\u0001\u0000\u0000\u0000\u0174\u014e\u0001\u0000\u0000\u0000\u0174\u014f"+ + "\u0001\u0000\u0000\u0000\u0174\u0150\u0001\u0000\u0000\u0000\u0174\u0151"+ + "\u0001\u0000\u0000\u0000\u0174\u0152\u0001\u0000\u0000\u0000\u0174\u0153"+ + "\u0001\u0000\u0000\u0000\u0174\u015e\u0001\u0000\u0000\u0000\u0174\u0169"+ + "\u0001\u0000\u0000\u0000\u01753\u0001\u0000\u0000\u0000\u0176\u0177\u0005"+ + "\n\u0000\u0000\u0177\u0178\u0005\u001c\u0000\u0000\u01785\u0001\u0000"+ + "\u0000\u0000\u0179\u017a\u0005\u0010\u0000\u0000\u017a\u017f\u00038\u001c"+ + "\u0000\u017b\u017c\u0005\"\u0000\u0000\u017c\u017e\u00038\u001c\u0000"+ + "\u017d\u017b\u0001\u0000\u0000\u0000\u017e\u0181\u0001\u0000\u0000\u0000"+ + "\u017f\u017d\u0001\u0000\u0000\u0000\u017f\u0180\u0001\u0000\u0000\u0000"+ + "\u01807\u0001\u0000\u0000\u0000\u0181\u017f\u0001\u0000\u0000\u0000\u0182"+ + "\u0184\u0003\n\u0005\u0000\u0183\u0185\u0007\u0005\u0000\u0000\u0184\u0183"+ + "\u0001\u0000\u0000\u0000\u0184\u0185\u0001\u0000\u0000\u0000\u0185\u0188"+ + "\u0001\u0000\u0000\u0000\u0186\u0187\u0005.\u0000\u0000\u0187\u0189\u0007"+ + "\u0006\u0000\u0000\u0188\u0186\u0001\u0000\u0000\u0000\u0188\u0189\u0001"+ + "\u0000\u0000\u0000\u01899\u0001\u0000\u0000\u0000\u018a\u018b\u0005\t"+ + "\u0000\u0000\u018b\u0190\u0003,\u0016\u0000\u018c\u018d\u0005\"\u0000"+ + "\u0000\u018d\u018f\u0003,\u0016\u0000\u018e\u018c\u0001\u0000\u0000\u0000"+ + "\u018f\u0192\u0001\u0000\u0000\u0000\u0190\u018e\u0001\u0000\u0000\u0000"+ + "\u0190\u0191\u0001\u0000\u0000\u0000\u0191\u019d\u0001\u0000\u0000\u0000"+ + "\u0192\u0190\u0001\u0000\u0000\u0000\u0193\u0194\u0005\f\u0000\u0000\u0194"+ + "\u0199\u0003,\u0016\u0000\u0195\u0196\u0005\"\u0000\u0000\u0196\u0198"+ + "\u0003,\u0016\u0000\u0197\u0195\u0001\u0000\u0000\u0000\u0198\u019b\u0001"+ + "\u0000\u0000\u0000\u0199\u0197\u0001\u0000\u0000\u0000\u0199\u019a\u0001"+ + "\u0000\u0000\u0000\u019a\u019d\u0001\u0000\u0000\u0000\u019b\u0199\u0001"+ + "\u0000\u0000\u0000\u019c\u018a\u0001\u0000\u0000\u0000\u019c\u0193\u0001"+ + "\u0000\u0000\u0000\u019d;\u0001\u0000\u0000\u0000\u019e\u019f\u0005\u0002"+ + "\u0000\u0000\u019f\u01a4\u0003,\u0016\u0000\u01a0\u01a1\u0005\"\u0000"+ + "\u0000\u01a1\u01a3\u0003,\u0016\u0000\u01a2\u01a0\u0001\u0000\u0000\u0000"+ + "\u01a3\u01a6\u0001\u0000\u0000\u0000\u01a4\u01a2\u0001\u0000\u0000\u0000"+ + "\u01a4\u01a5\u0001\u0000\u0000\u0000\u01a5=\u0001\u0000\u0000\u0000\u01a6"+ + "\u01a4\u0001\u0000\u0000\u0000\u01a7\u01a8\u0005\r\u0000\u0000\u01a8\u01ad"+ + "\u0003@ \u0000\u01a9\u01aa\u0005\"\u0000\u0000\u01aa\u01ac\u0003@ \u0000"+ + "\u01ab\u01a9\u0001\u0000\u0000\u0000\u01ac\u01af\u0001\u0000\u0000\u0000"+ + "\u01ad\u01ab\u0001\u0000\u0000\u0000\u01ad\u01ae\u0001\u0000\u0000\u0000"+ + "\u01ae?\u0001\u0000\u0000\u0000\u01af\u01ad\u0001\u0000\u0000\u0000\u01b0"+ + "\u01b1\u0003,\u0016\u0000\u01b1\u01b2\u0005O\u0000\u0000\u01b2\u01b3\u0003"+ + ",\u0016\u0000\u01b3A\u0001\u0000\u0000\u0000\u01b4\u01b5\u0005\u0001\u0000"+ + "\u0000\u01b5\u01b6\u0003\u0012\t\u0000\u01b6\u01b8\u0003T*\u0000\u01b7"+ + "\u01b9\u0003H$\u0000\u01b8\u01b7\u0001\u0000\u0000\u0000\u01b8\u01b9\u0001"+ + "\u0000\u0000\u0000\u01b9C\u0001\u0000\u0000\u0000\u01ba\u01bb\u0005\u0007"+ + "\u0000\u0000\u01bb\u01bc\u0003\u0012\t\u0000\u01bc\u01bd\u0003T*\u0000"+ + "\u01bdE\u0001\u0000\u0000\u0000\u01be\u01bf\u0005\u000b\u0000\u0000\u01bf"+ + "\u01c0\u0003*\u0015\u0000\u01c0G\u0001\u0000\u0000\u0000\u01c1\u01c6\u0003"+ + 
"J%\u0000\u01c2\u01c3\u0005\"\u0000\u0000\u01c3\u01c5\u0003J%\u0000\u01c4"+ + "\u01c2\u0001\u0000\u0000\u0000\u01c5\u01c8\u0001\u0000\u0000\u0000\u01c6"+ + "\u01c4\u0001\u0000\u0000\u0000\u01c6\u01c7\u0001\u0000\u0000\u0000\u01c7"+ + "I\u0001\u0000\u0000\u0000\u01c8\u01c6\u0001\u0000\u0000\u0000\u01c9\u01ca"+ + "\u0003.\u0017\u0000\u01ca\u01cb\u0005!\u0000\u0000\u01cb\u01cc\u00032"+ + "\u0019\u0000\u01ccK\u0001\u0000\u0000\u0000\u01cd\u01ce\u0007\u0007\u0000"+ + "\u0000\u01ceM\u0001\u0000\u0000\u0000\u01cf\u01d2\u0003P(\u0000\u01d0"+ + "\u01d2\u0003R)\u0000\u01d1\u01cf\u0001\u0000\u0000\u0000\u01d1\u01d0\u0001"+ + "\u0000\u0000\u0000\u01d2O\u0001\u0000\u0000\u0000\u01d3\u01d5\u0007\u0000"+ + "\u0000\u0000\u01d4\u01d3\u0001\u0000\u0000\u0000\u01d4\u01d5\u0001\u0000"+ + "\u0000\u0000\u01d5\u01d6\u0001\u0000\u0000\u0000\u01d6\u01d7\u0005\u001d"+ + "\u0000\u0000\u01d7Q\u0001\u0000\u0000\u0000\u01d8\u01da\u0007\u0000\u0000"+ + "\u0000\u01d9\u01d8\u0001\u0000\u0000\u0000\u01d9\u01da\u0001\u0000\u0000"+ + "\u0000\u01da\u01db\u0001\u0000\u0000\u0000\u01db\u01dc\u0005\u001c\u0000"+ + "\u0000\u01dcS\u0001\u0000\u0000\u0000\u01dd\u01de\u0005\u001b\u0000\u0000"+ + "\u01deU\u0001\u0000\u0000\u0000\u01df\u01e0\u0007\b\u0000\u0000\u01e0"+ + "W\u0001\u0000\u0000\u0000\u01e1\u01e2\u0005\u0005\u0000\u0000\u01e2\u01e3"+ + "\u0003Z-\u0000\u01e3Y\u0001\u0000\u0000\u0000\u01e4\u01e5\u0005?\u0000"+ + "\u0000\u01e5\u01e6\u0003\u0002\u0001\u0000\u01e6\u01e7\u0005@\u0000\u0000"+ + "\u01e7[\u0001\u0000\u0000\u0000\u01e8\u01e9\u0005\u000f\u0000\u0000\u01e9"+ + "\u01ed\u0005^\u0000\u0000\u01ea\u01eb\u0005\u000f\u0000\u0000\u01eb\u01ed"+ + "\u0005_\u0000\u0000\u01ec\u01e8\u0001\u0000\u0000\u0000\u01ec\u01ea\u0001"+ + "\u0000\u0000\u0000\u01ed]\u0001\u0000\u0000\u0000\u01ee\u01ef\u0005\u0003"+ + "\u0000\u0000\u01ef\u01f2\u0003(\u0014\u0000\u01f0\u01f1\u0005S\u0000\u0000"+ + "\u01f1\u01f3\u0003,\u0016\u0000\u01f2\u01f0\u0001\u0000\u0000\u0000\u01f2"+ + "\u01f3\u0001\u0000\u0000\u0000\u01f3\u01fd\u0001\u0000\u0000\u0000\u01f4"+ + "\u01f5\u0005T\u0000\u0000\u01f5\u01fa\u0003`0\u0000\u01f6\u01f7\u0005"+ + "\"\u0000\u0000\u01f7\u01f9\u0003`0\u0000\u01f8\u01f6\u0001\u0000\u0000"+ + "\u0000\u01f9\u01fc\u0001\u0000\u0000\u0000\u01fa\u01f8\u0001\u0000\u0000"+ + "\u0000\u01fa\u01fb\u0001\u0000\u0000\u0000\u01fb\u01fe\u0001\u0000\u0000"+ + "\u0000\u01fc\u01fa\u0001\u0000\u0000\u0000\u01fd\u01f4\u0001\u0000\u0000"+ + "\u0000\u01fd\u01fe\u0001\u0000\u0000\u0000\u01fe_\u0001\u0000\u0000\u0000"+ + "\u01ff\u0200\u0003,\u0016\u0000\u0200\u0201\u0005!\u0000\u0000\u0201\u0203"+ + "\u0001\u0000\u0000\u0000\u0202\u01ff\u0001\u0000\u0000\u0000\u0202\u0203"+ + "\u0001\u0000\u0000\u0000\u0203\u0204\u0001\u0000\u0000\u0000\u0204\u0205"+ + "\u0003,\u0016\u0000\u0205a\u0001\u0000\u0000\u00004mt\u0083\u008f\u0098"+ + "\u00a0\u00a4\u00ac\u00ae\u00b3\u00ba\u00bf\u00c6\u00cc\u00d4\u00d6\u00e0"+ + "\u00ea\u00ed\u00f9\u0101\u0109\u010d\u0116\u0120\u0124\u012a\u0131\u013b"+ + "\u0143\u0159\u0164\u016f\u0174\u017f\u0184\u0188\u0190\u0199\u019c\u01a4"+ + "\u01ad\u01b8\u01c6\u01d1\u01d4\u01d9\u01ec\u01f2\u01fa\u01fd\u0202"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java index 3137eff0b6550..73b529cd2be92 100644 --- 
a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseListener.java @@ -401,13 +401,13 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { * *

<p>The default implementation does nothing.</p>
*/ - @Override public void enterSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx) { } + @Override public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } /** * {@inheritDoc} * *

<p>The default implementation does nothing.</p>
*/ - @Override public void exitSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx) { } + @Override public void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { } /** * {@inheritDoc} * @@ -420,6 +420,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

<p>The default implementation does nothing.</p>
*/ @Override public void exitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { } /** * {@inheritDoc} * @@ -432,6 +444,18 @@ public class EsqlBaseParserBaseListener implements EsqlBaseParserListener { *

<p>The default implementation does nothing.</p>
*/ @Override public void exitIdentifier(EsqlBaseParser.IdentifierContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void enterIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
+ */ + @Override public void exitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java index d7b2f359e3c83..d35481745cecc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserBaseVisitor.java @@ -242,7 +242,7 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im *

<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
*/ - @Override public T visitSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx) { return visitChildren(ctx); } + @Override public T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -250,6 +250,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
*/ @Override public T visitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -257,6 +264,13 @@ public class EsqlBaseParserBaseVisitor extends AbstractParseTreeVisitor im * {@link #visitChildren} on {@code ctx}.
*/ @Override public T visitIdentifier(EsqlBaseParser.IdentifierContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
+ */ + @Override public T visitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java index dd6cdaacddbef..6c8cd7272d8dc 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserListener.java @@ -362,15 +362,15 @@ public interface EsqlBaseParserListener extends ParseTreeListener { */ void exitGrouping(EsqlBaseParser.GroupingContext ctx); /** - * Enter a parse tree produced by {@link EsqlBaseParser#sourceIdentifier}. + * Enter a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. * @param ctx the parse tree */ - void enterSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx); + void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); /** - * Exit a parse tree produced by {@link EsqlBaseParser#sourceIdentifier}. + * Exit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. * @param ctx the parse tree */ - void exitSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx); + void exitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#qualifiedName}. * @param ctx the parse tree @@ -381,6 +381,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#qualifiedNamePattern}. + * @param ctx the parse tree + */ + void enterQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#qualifiedNamePattern}. + * @param ctx the parse tree + */ + void exitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx); /** * Enter a parse tree produced by {@link EsqlBaseParser#identifier}. * @param ctx the parse tree @@ -391,6 +401,16 @@ public interface EsqlBaseParserListener extends ParseTreeListener { * @param ctx the parse tree */ void exitIdentifier(EsqlBaseParser.IdentifierContext ctx); + /** + * Enter a parse tree produced by {@link EsqlBaseParser#identifierPattern}. + * @param ctx the parse tree + */ + void enterIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx); + /** + * Exit a parse tree produced by {@link EsqlBaseParser#identifierPattern}. + * @param ctx the parse tree + */ + void exitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx); /** * Enter a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link EsqlBaseParser#constant}. 
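Reviewer note (illustrative, not part of the diff): the listener interface above renames the sourceIdentifier hooks to fromIdentifier and adds dedicated pattern hooks, so names that may carry wildcards (KEEP, DROP, RENAME, ENRICH ... WITH) travel through qualifiedNamePattern/identifierPattern while FROM targets keep their own rule. A minimal sketch of a consumer adapting to the renamed and added callbacks, assuming only the generated classes from this change plus the stock ANTLR runtime:

import org.antlr.v4.runtime.tree.ParseTreeWalker;
import org.elasticsearch.xpack.esql.parser.EsqlBaseParser;
import org.elasticsearch.xpack.esql.parser.EsqlBaseParserBaseListener;

import java.util.ArrayList;
import java.util.List;

class IdentifierCollector extends EsqlBaseParserBaseListener {
    final List<String> fromTargets = new ArrayList<>();   // index names seen in FROM
    final List<String> namePatterns = new ArrayList<>();  // possibly-wildcarded field names

    @Override
    public void enterFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx) {
        fromTargets.add(ctx.getText()); // was enterSourceIdentifier before this rename
    }

    @Override
    public void enterQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) {
        namePatterns.add(ctx.getText()); // new rule: dotted name that may contain *
    }
}

Walking a parse tree with ParseTreeWalker.DEFAULT.walk(new IdentifierCollector(), tree) would then separate source names from projection patterns without touching the visitor hierarchy.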
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java index 35297f3d4f336..2fe5de566dbaf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/EsqlBaseParserVisitor.java @@ -220,23 +220,35 @@ public interface EsqlBaseParserVisitor extends ParseTreeVisitor { */ T visitGrouping(EsqlBaseParser.GroupingContext ctx); /** - * Visit a parse tree produced by {@link EsqlBaseParser#sourceIdentifier}. + * Visit a parse tree produced by {@link EsqlBaseParser#fromIdentifier}. * @param ctx the parse tree * @return the visitor result */ - T visitSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx); + T visitFromIdentifier(EsqlBaseParser.FromIdentifierContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#qualifiedName}. * @param ctx the parse tree * @return the visitor result */ T visitQualifiedName(EsqlBaseParser.QualifiedNameContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#qualifiedNamePattern}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx); /** * Visit a parse tree produced by {@link EsqlBaseParser#identifier}. * @param ctx the parse tree * @return the visitor result */ T visitIdentifier(EsqlBaseParser.IdentifierContext ctx); + /** + * Visit a parse tree produced by {@link EsqlBaseParser#identifierPattern}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx); /** * Visit a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link EsqlBaseParser#constant}. diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java index f24324fac2fbd..3b1ef475350b1 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/ExpressionBuilder.java @@ -207,10 +207,21 @@ public UnresolvedAttribute visitQualifiedName(EsqlBaseParser.QualifiedNameContex return null; } - return new UnresolvedAttribute( - source(ctx), - Strings.collectionToDelimitedString(visitList(this, ctx.identifier(), String.class), ".") - ); + List strings = visitList(this, ctx.identifier(), String.class); + return new UnresolvedAttribute(source(ctx), Strings.collectionToDelimitedString(strings, ".")); + } + + @Override + public NamedExpression visitQualifiedNamePattern(EsqlBaseParser.QualifiedNamePatternContext ctx) { + if (ctx == null) { + return null; + } + + List strings = visitList(this, ctx.identifierPattern(), String.class); + var src = source(ctx); + return strings.size() == 1 && strings.get(0).equals(WILDCARD) + ? new UnresolvedStar(src, null) + : new UnresolvedAttribute(src, Strings.collectionToDelimitedString(strings, ".")); } @Override @@ -366,22 +377,32 @@ public Order visitOrderExpression(EsqlBaseParser.OrderExpressionContext ctx) { ); } - public NamedExpression visitProjectExpression(EsqlBaseParser.SourceIdentifierContext ctx) { - Source src = source(ctx); - String identifier = visitSourceIdentifier(ctx); - return identifier.equals(WILDCARD) ? 
new UnresolvedStar(src, null) : new UnresolvedAttribute(src, identifier); - } - @Override public Alias visitRenameClause(EsqlBaseParser.RenameClauseContext ctx) { Source src = source(ctx); - String newName = visitSourceIdentifier(ctx.newName); - String oldName = visitSourceIdentifier(ctx.oldName); - if (newName.contains(WILDCARD) || oldName.contains(WILDCARD)) { + NamedExpression newName = visitQualifiedNamePattern(ctx.newName); + NamedExpression oldName = visitQualifiedNamePattern(ctx.oldName); + if (newName.name().contains(WILDCARD) || oldName.name().contains(WILDCARD)) { throw new ParsingException(src, "Using wildcards (*) in renaming projections is not allowed [{}]", src.text()); } - return new Alias(src, newName, new UnresolvedAttribute(source(ctx.oldName), oldName)); + return new Alias(src, newName.name(), oldName); + } + + @Override + public NamedExpression visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { + Source src = source(ctx); + NamedExpression enrichField = enrichFieldName(ctx.enrichField); + NamedExpression newName = enrichFieldName(ctx.newName); + return newName == null ? enrichField : new Alias(src, newName.name(), enrichField); + } + + private NamedExpression enrichFieldName(EsqlBaseParser.QualifiedNamePatternContext ctx) { + var name = visitQualifiedNamePattern(ctx); + if (name != null && name.name().contains(WILDCARD)) { + throw new ParsingException(source(ctx), "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", name.name()); + } + return name; } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java index 296206b1079b2..2039dc633f6cf 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/IdentifierBuilder.java @@ -9,6 +9,8 @@ import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.common.Strings; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.FromIdentifierContext; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.IdentifierContext; import java.util.List; @@ -17,27 +19,32 @@ abstract class IdentifierBuilder extends AbstractBuilder { @Override - public String visitIdentifier(EsqlBaseParser.IdentifierContext ctx) { - return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.UNQUOTED_IDENTIFIER()); + public String visitIdentifier(IdentifierContext ctx) { + return ctx == null ? null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.UNQUOTED_IDENTIFIER()); } @Override - public String visitSourceIdentifier(EsqlBaseParser.SourceIdentifierContext ctx) { - return unquoteIdentifier(ctx.SRC_QUOTED_IDENTIFIER(), ctx.SRC_UNQUOTED_IDENTIFIER()); + public String visitIdentifierPattern(EsqlBaseParser.IdentifierPatternContext ctx) { + return unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.PROJECT_UNQUOTED_IDENTIFIER()); } - private static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { + @Override + public String visitFromIdentifier(FromIdentifierContext ctx) { + return ctx == null ? 
null : unquoteIdentifier(ctx.QUOTED_IDENTIFIER(), ctx.FROM_UNQUOTED_IDENTIFIER()); + } + + static String unquoteIdentifier(TerminalNode quotedNode, TerminalNode unquotedNode) { String result; if (quotedNode != null) { String identifier = quotedNode.getText(); - result = identifier.substring(1, identifier.length() - 1); + result = identifier.substring(1, identifier.length() - 1).replace("``", "`"); } else { result = unquotedNode.getText(); } return result; } - public String visitSourceIdentifiers(List ctx) { + public String visitFromIdentifiers(List ctx) { return Strings.collectionToDelimitedString(visitList(this, ctx, String.class), ","); } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java index d5763f28f6394..f9d1a252afe42 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/parser/LogicalPlanBuilder.java @@ -12,6 +12,7 @@ import org.antlr.v4.runtime.tree.ParseTree; import org.elasticsearch.dissect.DissectException; import org.elasticsearch.dissect.DissectParser; +import org.elasticsearch.xpack.esql.parser.EsqlBaseParser.QualifiedNamePatternContext; import org.elasticsearch.xpack.esql.plan.logical.Dissect; import org.elasticsearch.xpack.esql.plan.logical.Drop; import org.elasticsearch.xpack.esql.plan.logical.Enrich; @@ -61,7 +62,6 @@ import static org.elasticsearch.xpack.ql.parser.ParserUtils.source; import static org.elasticsearch.xpack.ql.parser.ParserUtils.typedParsing; import static org.elasticsearch.xpack.ql.parser.ParserUtils.visitList; -import static org.elasticsearch.xpack.ql.util.StringUtils.WILDCARD; public class LogicalPlanBuilder extends ExpressionBuilder { @@ -149,9 +149,9 @@ public PlanFactory visitDissectCommand(EsqlBaseParser.DissectCommandContext ctx) @Override public PlanFactory visitMvExpandCommand(EsqlBaseParser.MvExpandCommandContext ctx) { - String identifier = visitSourceIdentifier(ctx.sourceIdentifier()); + UnresolvedAttribute field = visitQualifiedName(ctx.qualifiedName()); Source src = source(ctx); - return child -> new MvExpand(src, child, new UnresolvedAttribute(src, identifier), new UnresolvedAttribute(src, identifier)); + return child -> new MvExpand(src, child, field, new UnresolvedAttribute(src, field.name())); } @@ -175,11 +175,11 @@ public LogicalPlan visitRowCommand(EsqlBaseParser.RowCommandContext ctx) { @Override public LogicalPlan visitFromCommand(EsqlBaseParser.FromCommandContext ctx) { Source source = source(ctx); - TableIdentifier table = new TableIdentifier(source, null, visitSourceIdentifiers(ctx.sourceIdentifier())); + TableIdentifier table = new TableIdentifier(source, null, visitFromIdentifiers(ctx.fromIdentifier())); Map metadataMap = new LinkedHashMap<>(); if (ctx.metadata() != null) { - for (var c : ctx.metadata().sourceIdentifier()) { - String id = visitSourceIdentifier(c); + for (var c : ctx.metadata().fromIdentifier()) { + String id = visitFromIdentifier(c); Source src = source(c); if (MetadataAttribute.isSupported(id) == false) { throw new ParsingException(src, "unsupported metadata field [" + id + "]"); @@ -254,16 +254,16 @@ public Explain visitExplainCommand(EsqlBaseParser.ExplainCommandContext ctx) { @Override public PlanFactory visitDropCommand(EsqlBaseParser.DropCommandContext ctx) { - var identifiers = ctx.sourceIdentifier(); + var identifiers = 
ctx.qualifiedNamePattern(); List removals = new ArrayList<>(identifiers.size()); - for (EsqlBaseParser.SourceIdentifierContext idCtx : identifiers) { - Source src = source(idCtx); - String identifier = visitSourceIdentifier(idCtx); - if (identifier.equals(WILDCARD)) { + for (QualifiedNamePatternContext patternContext : identifiers) { + NamedExpression ne = visitQualifiedNamePattern(patternContext); + if (ne instanceof UnresolvedStar) { + var src = ne.source(); throw new ParsingException(src, "Removing all fields is not allowed [{}]", src.text()); } - removals.add(new UnresolvedAttribute(src, identifier)); + removals.add(ne); } return child -> new Drop(source(ctx), child, removals); @@ -280,13 +280,15 @@ public PlanFactory visitKeepCommand(EsqlBaseParser.KeepCommandContext ctx) { if (ctx.PROJECT() != null) { addWarning("PROJECT command is no longer supported, please use KEEP instead"); } - List projections = new ArrayList<>(ctx.sourceIdentifier().size()); + var identifiers = ctx.qualifiedNamePattern(); + List projections = new ArrayList<>(identifiers.size()); boolean hasSeenStar = false; - for (var srcIdCtx : ctx.sourceIdentifier()) { - NamedExpression ne = visitProjectExpression(srcIdCtx); + for (QualifiedNamePatternContext patternContext : identifiers) { + NamedExpression ne = visitQualifiedNamePattern(patternContext); if (ne instanceof UnresolvedStar) { if (hasSeenStar) { - throw new ParsingException(ne.source(), "Cannot specify [*] more than once", ne.source().text()); + var src = ne.source(); + throw new ParsingException(src, "Cannot specify [*] more than once", src.text()); } else { hasSeenStar = true; } @@ -309,11 +311,9 @@ public LogicalPlan visitShowFunctions(EsqlBaseParser.ShowFunctionsContext ctx) { @Override public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { return p -> { - final String policyName = visitSourceIdentifier(ctx.policyName); + String policyName = visitFromIdentifier(ctx.policyName); var source = source(ctx); - NamedExpression matchField = ctx.ON() != null - ? new UnresolvedAttribute(source(ctx.matchField), visitSourceIdentifier(ctx.matchField)) - : new EmptyAttribute(source); + NamedExpression matchField = ctx.ON() != null ? visitQualifiedNamePattern(ctx.matchField) : new EmptyAttribute(source); if (matchField.name().contains("*")) { throw new ParsingException( source(ctx), @@ -321,6 +321,7 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { matchField.name() ); } + List keepClauses = visitList(this, ctx.enrichWithClause(), NamedExpression.class); return new Enrich( source, @@ -333,22 +334,5 @@ public PlanFactory visitEnrichCommand(EsqlBaseParser.EnrichCommandContext ctx) { }; } - @Override - public NamedExpression visitEnrichWithClause(EsqlBaseParser.EnrichWithClauseContext ctx) { - Source src = source(ctx); - String enrichField = enrichFieldName(ctx.enrichField); - String newName = enrichFieldName(ctx.newName); - UnresolvedAttribute enrichAttr = new UnresolvedAttribute(src, enrichField); - return newName == null ? enrichAttr : new Alias(src, newName, enrichAttr); - } - - private String enrichFieldName(EsqlBaseParser.SourceIdentifierContext ctx) { - String name = ctx == null ? 
null : visitSourceIdentifier(ctx); - if (name != null && name.contains(WILDCARD)) { - throw new ParsingException(source(ctx), "Using wildcards (*) in ENRICH WITH projections is not allowed [{}]", name); - } - return name; - } - interface PlanFactory extends Function {} } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java index 17f669b5d30b3..0cdcd4af00026 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plan/logical/MvExpand.java @@ -22,13 +22,12 @@ public class MvExpand extends UnaryPlan { private final NamedExpression target; private final Attribute expanded; - private final List output; + private List output; public MvExpand(Source source, LogicalPlan child, NamedExpression target, Attribute expanded) { super(source, child); this.target = target; this.expanded = expanded; - this.output = calculateOutput(child.output(), target, expanded); } public static List calculateOutput(List input, NamedExpression target, Attribute expanded) { @@ -63,6 +62,9 @@ public UnaryPlan replaceChild(LogicalPlan newChild) { @Override public List output() { + if (output == null) { + output = calculateOutput(child().output(), target, expanded); + } return output; } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java new file mode 100644 index 0000000000000..98ac1a2d9910a --- /dev/null +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/EsqlTranslatorHandler.java @@ -0,0 +1,59 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.planner; + +import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; +import org.elasticsearch.xpack.esql.type.EsqlDataTypeConverter; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.FieldAttribute; +import org.elasticsearch.xpack.ql.expression.MetadataAttribute; +import org.elasticsearch.xpack.ql.expression.function.scalar.ScalarFunction; +import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNotNull; +import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNull; +import org.elasticsearch.xpack.ql.planner.ExpressionTranslator; +import org.elasticsearch.xpack.ql.planner.ExpressionTranslators; +import org.elasticsearch.xpack.ql.planner.QlTranslatorHandler; +import org.elasticsearch.xpack.ql.querydsl.query.Query; +import org.elasticsearch.xpack.ql.type.DataType; + +import java.util.function.Supplier; + +public final class EsqlTranslatorHandler extends QlTranslatorHandler { + @Override + public Query asQuery(Expression e) { + return ExpressionTranslators.toQuery(e, this); + } + + @Override + public Object convert(Object value, DataType dataType) { + return EsqlDataTypeConverter.convert(value, dataType); + } + + @Override + public Query wrapFunctionQuery(ScalarFunction sf, Expression field, Supplier querySupplier) { + if (field instanceof FieldAttribute fa) { + if (fa.getExactInfo().hasExact()) { + var exact = fa.exactAttribute(); + if (exact != fa) { + fa = exact; + } + } + // don't wrap is null/is not null with SVQ + Query query = querySupplier.get(); + if ((sf instanceof IsNull || sf instanceof IsNotNull) == false) { + query = new SingleValueQuery(query, fa.name()); + } + return ExpressionTranslator.wrapIfNested(query, field); + } + if (field instanceof MetadataAttribute) { + return querySupplier.get(); // MetadataAttributes are always single valued + } + throw new EsqlIllegalArgumentException("Expected a FieldAttribute or MetadataAttribute but received [" + field + "]"); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java index e4e2402a9c7a3..5620969625575 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlanner.java @@ -56,7 +56,6 @@ import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; -import org.elasticsearch.xpack.esql.plan.logical.local.LocalSupplier; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.DissectExec; import org.elasticsearch.xpack.esql.plan.physical.EnrichExec; @@ -88,7 +87,6 @@ import org.elasticsearch.xpack.ql.expression.NameId; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; -import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.ql.util.Holder; import java.util.ArrayList; @@ -323,29 +321,6 @@ private PhysicalOperation planExchange(ExchangeExec exchangeExec, LocalExecution private PhysicalOperation planExchangeSink(ExchangeSinkExec exchangeSink, 
LocalExecutionPlannerContext context) { Objects.requireNonNull(exchangeSinkHandler, "ExchangeSinkHandler wasn't provided"); var child = exchangeSink.child(); - // see https://github.com/elastic/elasticsearch/issues/100807 - handle case where the plan has been fully minimized - // to a local relation and the aggregate intermediate data erased. For this scenario, match the output the exchange output - // with that of the local relation - - if (child instanceof LocalSourceExec localExec) { - var output = exchangeSink.output(); - var localOutput = localExec.output(); - if (output.equals(localOutput) == false) { - // the outputs are going to be similar except for the bool "seen" flags which are added in below - List blocks = new ArrayList<>(asList(localExec.supplier().get())); - if (blocks.size() > 0) { - for (int i = 0, s = output.size(); i < s; i++) { - var out = output.get(i); - if (out.dataType() == DataTypes.BOOLEAN) { - blocks.add(i, BlockFactory.getNonBreakingInstance().newConstantBooleanBlockWith(true, 1)); - } - } - } - var newSupplier = LocalSupplier.of(blocks.toArray(Block[]::new)); - - child = new LocalSourceExec(localExec.source(), output, newSupplier); - } - } PhysicalOperation source = plan(child, context); @@ -814,9 +789,7 @@ public List createDrivers(String sessionId) { @Override public String describe() { - StringBuilder sb = new StringBuilder(); - sb.append(driverFactories.stream().map(DriverFactory::describe).collect(joining("\n"))); - return sb.toString(); + return driverFactories.stream().map(DriverFactory::describe).collect(joining("\n")); } } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java index adf684d573cd1..fc267c6bd78b8 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/planner/PlannerUtils.java @@ -8,6 +8,9 @@ package org.elasticsearch.xpack.esql.planner; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.ElementType; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.query.QueryBuilder; @@ -222,4 +225,14 @@ public static ElementType toElementType(DataType dataType) { } throw EsqlIllegalArgumentException.illegalDataType(dataType); } + + /** + * A non-breaking block factory used to create small pages during the planning + * TODO: Remove this + */ + @Deprecated(forRemoval = true) + public static final BlockFactory NON_BREAKING_BLOCK_FACTORY = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java index b7b31868d65e2..106846152a1a0 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/ComputeService.java @@ -15,6 +15,7 @@ import org.elasticsearch.action.search.SearchShardsRequest; import org.elasticsearch.action.search.SearchShardsResponse; import org.elasticsearch.action.search.TransportSearchShardsAction; +import 
org.elasticsearch.action.support.ChannelActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.RefCountingRunnable; @@ -26,7 +27,6 @@ import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.CountDown; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.compute.OwningChannelActionListener; import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.Driver; @@ -172,10 +172,9 @@ public void execute( concreteIndices, originalIndices, listener.delegateFailureAndWrap((delegate, targetNodes) -> { - final ExchangeSourceHandler exchangeSource = exchangeService.createSourceHandler( - sessionId, + final ExchangeSourceHandler exchangeSource = new ExchangeSourceHandler( queryPragmas.exchangeBufferSize(), - ESQL_THREAD_POOL_NAME + transportService.getThreadPool().executor(ESQL_THREAD_POOL_NAME) ); final List collectedProfiles = configuration.profile() ? Collections.synchronizedList(new ArrayList<>()) @@ -328,6 +327,7 @@ void runCompute(CancellableTask task, ComputeContext context, PhysicalPlan plan, private void acquireSearchContexts( List shardIds, + EsqlConfiguration configuration, Map aliasFilters, ActionListener> listener ) { @@ -351,11 +351,12 @@ private void acquireSearchContexts( try { for (IndexShard shard : targetShards) { var aliasFilter = aliasFilters.getOrDefault(shard.shardId().getIndex(), AliasFilter.EMPTY); - ShardSearchRequest shardSearchLocalRequest = new ShardSearchRequest(shard.shardId(), 0, aliasFilter); - SearchContext context = searchService.createSearchContext( - shardSearchLocalRequest, - SearchService.NO_TIMEOUT + var shardRequest = new ShardSearchRequest( + shard.shardId(), + configuration.absoluteStartedTimeInMillis(), + aliasFilter ); + SearchContext context = searchService.createSearchContext(shardRequest, SearchService.NO_TIMEOUT); searchContexts.add(context); } for (SearchContext searchContext : searchContexts) { @@ -500,9 +501,10 @@ public void messageReceived(DataNodeRequest request, TransportChannel channel, T final var sessionId = request.sessionId(); final var exchangeSink = exchangeService.getSinkHandler(sessionId); parentTask.addListener(() -> exchangeService.finishSinkHandler(sessionId, new TaskCancelledException("task cancelled"))); - final ActionListener listener = new OwningChannelActionListener<>(channel); - acquireSearchContexts(request.shardIds(), request.aliasFilters(), ActionListener.wrap(searchContexts -> { - var computeContext = new ComputeContext(sessionId, searchContexts, request.configuration(), null, exchangeSink); + final ActionListener listener = new ChannelActionListener<>(channel); + final EsqlConfiguration configuration = request.configuration(); + acquireSearchContexts(request.shardIds(), configuration, request.aliasFilters(), ActionListener.wrap(searchContexts -> { + var computeContext = new ComputeContext(sessionId, searchContexts, configuration, null, exchangeSink); runCompute(parentTask, computeContext, request.plan(), ActionListener.wrap(driverProfiles -> { // don't return until all pages are fetched exchangeSink.addCompletionListener( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index e8a57e5a49808..b9564577e53f0 
100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -17,6 +17,8 @@ import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsFilter; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.Block; import org.elasticsearch.compute.data.BlockFactory; @@ -41,7 +43,11 @@ import org.elasticsearch.xpack.core.action.XPackUsageFeatureAction; import org.elasticsearch.xpack.esql.EsqlInfoTransportAction; import org.elasticsearch.xpack.esql.EsqlUsageTransportAction; +import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; +import org.elasticsearch.xpack.esql.action.RestEsqlAsyncQueryAction; +import org.elasticsearch.xpack.esql.action.RestEsqlDeleteAsyncResultAction; +import org.elasticsearch.xpack.esql.action.RestEsqlGetAsyncResultAction; import org.elasticsearch.xpack.esql.action.RestEsqlQueryAction; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; @@ -82,7 +88,13 @@ public class EsqlPlugin extends Plugin implements ActionPlugin { public Collection createComponents(PluginServices services) { CircuitBreaker circuitBreaker = services.indicesService().getBigArrays().breakerService().getBreaker("request"); Objects.requireNonNull(circuitBreaker, "request circuit breaker wasn't set"); - BlockFactory blockFactory = new BlockFactory(circuitBreaker, services.indicesService().getBigArrays().withCircuitBreaking()); + Settings settings = services.clusterService().getSettings(); + ByteSizeValue maxPrimitiveArrayBlockSize = settings.getAsBytesSize( + BlockFactory.MAX_BLOCK_PRIMITIVE_ARRAY_SIZE_SETTING, + BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE + ); + BigArrays bigArrays = services.indicesService().getBigArrays().withCircuitBreaking(); + BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize, null); return List.of( new PlanExecutor( new IndexResolver( @@ -116,6 +128,7 @@ public List> getSettings() { public List> getActions() { return List.of( new ActionHandler<>(EsqlQueryAction.INSTANCE, TransportEsqlQueryAction.class), + new ActionHandler<>(EsqlAsyncGetResultAction.INSTANCE, TransportEsqlAsyncGetResultsAction.class), new ActionHandler<>(EsqlStatsAction.INSTANCE, TransportEsqlStatsAction.class), new ActionHandler<>(XPackUsageFeatureAction.ESQL, EsqlUsageTransportAction.class), new ActionHandler<>(XPackInfoFeatureAction.ESQL, EsqlInfoTransportAction.class) @@ -132,7 +145,12 @@ public List getRestHandlers( IndexNameExpressionResolver indexNameExpressionResolver, Supplier nodesInCluster ) { - return List.of(new RestEsqlQueryAction()); + return List.of( + new RestEsqlQueryAction(), + new RestEsqlAsyncQueryAction(), + new RestEsqlGetAsyncResultAction(), + new RestEsqlDeleteAsyncResultAction() + ); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java new file mode 100644 index 0000000000000..8785b8f5de887 --- /dev/null +++ 
b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java @@ -0,0 +1,58 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.plugin; + +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.esql.action.EsqlAsyncGetResultAction; +import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.elasticsearch.xpack.esql.action.EsqlQueryTask; +import org.elasticsearch.xpack.ql.plugin.AbstractTransportQlAsyncGetResultsAction; + +public class TransportEsqlAsyncGetResultsAction extends AbstractTransportQlAsyncGetResultsAction { + + private final BlockFactory blockFactory; + + @Inject + public TransportEsqlAsyncGetResultsAction( + TransportService transportService, + ActionFilters actionFilters, + ClusterService clusterService, + NamedWriteableRegistry registry, + Client client, + ThreadPool threadPool, + BigArrays bigArrays, + BlockFactory blockFactory + ) { + super( + EsqlAsyncGetResultAction.NAME, + transportService, + actionFilters, + clusterService, + registry, + client, + threadPool, + bigArrays, + EsqlQueryTask.class + ); + this.blockFactory = blockFactory; + } + + @Override + public Writeable.Reader responseReader() { + return EsqlQueryResponse.reader(blockFactory); + } +} diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index d272aba26e4e8..5a199a6581156 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -11,8 +11,11 @@ import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; @@ -23,22 +26,32 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.XPackPlugin; +import org.elasticsearch.xpack.core.async.AsyncExecutionId; import org.elasticsearch.xpack.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryAction; import org.elasticsearch.xpack.esql.action.EsqlQueryRequest; 
import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; +import org.elasticsearch.xpack.esql.action.EsqlQueryTask; import org.elasticsearch.xpack.esql.enrich.EnrichLookupService; import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolver; import org.elasticsearch.xpack.esql.execution.PlanExecutor; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.async.AsyncTaskManagementService; +import java.io.IOException; import java.time.ZoneOffset; import java.util.List; import java.util.Locale; +import java.util.Map; import java.util.concurrent.Executor; -public class TransportEsqlQueryAction extends HandledTransportAction { +import static org.elasticsearch.xpack.core.ClientHelper.ASYNC_SEARCH_ORIGIN; + +public class TransportEsqlQueryAction extends HandledTransportAction + implements + AsyncTaskManagementService.AsyncOperation { private final PlanExecutor planExecutor; private final ComputeService computeService; @@ -47,6 +60,7 @@ public class TransportEsqlQueryAction extends HandledTransportAction asyncTaskManagementService; @Inject public TransportEsqlQueryAction( @@ -58,7 +72,10 @@ public TransportEsqlQueryAction( ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, - BlockFactory blockFactory + BlockFactory blockFactory, + Client client, + NamedWriteableRegistry registry + ) { // TODO replace SAME when removing workaround for https://github.com/elastic/elasticsearch/issues/97916 super(EsqlQueryAction.NAME, transportService, actionFilters, EsqlQueryRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); @@ -79,12 +96,41 @@ public TransportEsqlQueryAction( bigArrays, blockFactory ); + this.asyncTaskManagementService = new AsyncTaskManagementService<>( + XPackPlugin.ASYNC_RESULTS_INDEX, + client, + ASYNC_SEARCH_ORIGIN, + registry, + taskManager, + EsqlQueryAction.INSTANCE.name(), + this, + EsqlQueryTask.class, + clusterService, + threadPool, + bigArrays + ); } @Override protected void doExecute(Task task, EsqlQueryRequest request, ActionListener listener) { - // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can - requestExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); + listener = listener.delegateFailureAndWrap(ActionListener::respondAndRelease); + if (requestIsAsync(request)) { + asyncTaskManagementService.asyncExecute( + request, + request.waitForCompletionTimeout(), + request.keepAlive(), + request.keepOnCompletion(), + listener + ); + } else { + // workaround for https://github.com/elastic/elasticsearch/issues/97916 - TODO remove this when we can + requestExecutor.execute(ActionRunnable.wrap(listener, l -> doExecuteForked(task, request, l))); + } + } + + @Override + public void execute(EsqlQueryRequest request, EsqlQueryTask task, ActionListener listener) { + doExecuteForked(task, request, listener); } private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener listener) { @@ -120,7 +166,12 @@ private void doExecuteForked(Task task, EsqlQueryRequest request, ActionListener EsqlQueryResponse.Profile profile = configuration.profile() ? 
new EsqlQueryResponse.Profile(result.profiles()) : null; - return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar()); + if (task instanceof EsqlQueryTask asyncTask && request.keepOnCompletion()) { + String id = asyncTask.getExecutionId().getEncoded(); + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), id, false, request.async()); + } else { + return new EsqlQueryResponse(columns, result.pages(), profile, request.columnar(), request.async()); + } }) ) ) @@ -143,4 +194,50 @@ public ExchangeService exchangeService() { public EnrichLookupService enrichLookupService() { return enrichLookupService; } + + @Override + public EsqlQueryTask createTask( + EsqlQueryRequest request, + long id, + String type, + String action, + TaskId parentTaskId, + Map headers, + Map originHeaders, + AsyncExecutionId asyncExecutionId + ) { + return new EsqlQueryTask( + id, + type, + action, + request.getDescription(), + parentTaskId, + headers, + originHeaders, + asyncExecutionId, + request.keepAlive() + ); + } + + @Override + public EsqlQueryResponse initialResponse(EsqlQueryTask task) { + return new EsqlQueryResponse( + List.of(), + List.of(), + null, + false, + task.getExecutionId().getEncoded(), + true, // is_running + true // isAsync + ); + } + + @Override + public EsqlQueryResponse readResponse(StreamInput inputStream) throws IOException { + throw new AssertionError("should not reach here"); + } + + private static boolean requestIsAsync(EsqlQueryRequest request) { + return request.async(); + } } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java index ac13f25c2d2a9..ccec6554cb2cb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/session/EsqlConfiguration.java @@ -112,6 +112,15 @@ public String query() { return query; } + /** + * Returns the current time in milliseconds from the time epoch for the execution of this request. + * It ensures consistency by using the same value on all nodes involved in the search request. + * Note: Currently, it returns {@link System#currentTimeMillis()}, but this value will be serialized between nodes. + */ + public long absoluteStartedTimeInMillis() { + return System.currentTimeMillis(); + } + /** * Enable profiling, sacrificing performance to return information about * what operations are taking the most time. 
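Reviewer note (illustrative, not part of the diff): absoluteStartedTimeInMillis() pairs with the ComputeService change earlier in this diff, where acquireSearchContexts now passes it into ShardSearchRequest instead of a hard-coded 0, so every shard resolves time-relative constructs such as date math against the same instant. A hedged sketch of the capture-once pattern the Javadoc describes; RequestClock and its method names are ours, not the diff's:

// Hypothetical sketch: read the clock exactly once per request, then hand that same
// value to every shard-level request instead of letting each shard consult its own clock.
final class RequestClock {
    private final long startedAtMillis = System.currentTimeMillis(); // captured once

    long nowInMillis() {
        return startedAtMillis; // stable for the lifetime of the request
    }
}

A coordinator would create one RequestClock per query and serialize nowInMillis() into each per-shard request, so an expression like "now-1h" denotes the same absolute time range on every node involved in the query.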
diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java index b5d75a1528493..699c5a8e13a46 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/stats/SearchStats.java @@ -161,14 +161,16 @@ public boolean isSingleValue(String field) { if (exists(field) == false) { stat.singleValue = true; } else { - var sv = new boolean[] { false }; + var sv = new boolean[] { true }; for (SearchContext context : contexts) { MappedFieldType mappedType = context.getSearchExecutionContext().getFieldType(field); - doWithContexts(r -> { - sv[0] &= detectSingleValue(r, mappedType, field); - return sv[0]; - }, true); - break; + if (mappedType != null) { + doWithContexts(r -> { + sv[0] &= detectSingleValue(r, mappedType, field); + return sv[0]; + }, true); + break; + } } stat.singleValue = sv[0]; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java index 17ed0c1223636..d19922afd2815 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/CsvTests.java @@ -229,7 +229,7 @@ protected final boolean enableWarningsCheck() { } public boolean logResults() { - return true; + return false; } private void doTest() throws Exception { @@ -331,11 +331,16 @@ private ActualResults executePlan(BigArrays bigArrays) throws Exception { ExchangeSinkHandler exchangeSink = new ExchangeSinkHandler(between(1, 64), threadPool::relativeTimeInMillis); Settings.Builder settings = Settings.builder(); + BlockFactory blockFactory = new BlockFactory( + bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), + bigArrays, + ByteSizeValue.ofBytes(randomLongBetween(1, BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE.getBytes() * 2)) + ); LocalExecutionPlanner executionPlanner = new LocalExecutionPlanner( sessionId, new CancellableTask(1, "transport", "esql", null, TaskId.EMPTY_TASK_ID, Map.of()), bigArrays, - new BlockFactory(bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST), bigArrays), + blockFactory, randomNodeSettings(), configuration, exchangeSource, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java index c8da2792c7565..4be95b95afe54 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/SerializationTestUtils.java @@ -14,6 +14,7 @@ import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.index.query.BoolQueryBuilder; +import org.elasticsearch.index.query.ExistsQueryBuilder; import org.elasticsearch.index.query.MatchAllQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -29,6 +30,7 @@ import org.elasticsearch.xpack.esql.querydsl.query.SingleValueQuery; import org.elasticsearch.xpack.esql.session.EsqlConfiguration; import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import java.io.IOException; 
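// (Reviewer note, illustrative — not part of the diff.) The SearchStats.isSingleValue change
// above fixes an AND-accumulator that was seeded with false: starting from
// new boolean[] { false }, the update sv[0] &= detectSingleValue(...) could never become
// true, so fields that exist were always reported as multi-valued. Seeding with true gives
// the intended conjunction across search contexts, and the new mappedType != null guard
// skips contexts that do not map the field at all. Worked micro-example of the two seeds:
//
//   boolean[] old = { false }; old[0] &= true;  // stays false forever
//   boolean[] fix = { true };  fix[0] &= true;  // true until a context reports multi-valued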
import java.io.UncheckedIOException; @@ -43,6 +45,11 @@ public static void assertSerialization(PhysicalPlan plan) { EqualsHashCodeTestUtils.checkEqualsAndHashCode(plan, unused -> deserPlan); } + public static void assertSerialization(LogicalPlan plan) { + var deserPlan = serializeDeserialize(plan, PlanStreamOutput::writeLogicalPlanNode, PlanStreamInput::readLogicalPlanNode); + EqualsHashCodeTestUtils.checkEqualsAndHashCode(plan, unused -> deserPlan); + } + public static void assertSerialization(Expression expression) { Expression deserExpression = serializeDeserialize(expression, PlanStreamOutput::writeExpression, PlanStreamInput::readExpression); EqualsHashCodeTestUtils.checkEqualsAndHashCode(expression, unused -> deserExpression); @@ -85,6 +92,7 @@ public static NamedWriteableRegistry writableRegistry() { new NamedWriteableRegistry.Entry(QueryBuilder.class, BoolQueryBuilder.NAME, BoolQueryBuilder::new), new NamedWriteableRegistry.Entry(QueryBuilder.class, WildcardQueryBuilder.NAME, WildcardQueryBuilder::new), new NamedWriteableRegistry.Entry(QueryBuilder.class, RegexpQueryBuilder.NAME, RegexpQueryBuilder::new), + new NamedWriteableRegistry.Entry(QueryBuilder.class, ExistsQueryBuilder.NAME, ExistsQueryBuilder::new), SingleValueQuery.ENTRY ) ); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java new file mode 100644 index 0000000000000..99cf8be307054 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/TestBlockFactory.java @@ -0,0 +1,27 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql; + +import org.elasticsearch.common.breaker.NoopCircuitBreaker; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.compute.data.BlockFactory; + +public class TestBlockFactory { + + private static final BlockFactory NON_BREAKING = BlockFactory.getInstance( + new NoopCircuitBreaker("noop-esql-breaker"), + BigArrays.NON_RECYCLING_INSTANCE + ); + + /** + * Returns the Non-Breaking block factory. 
+ */ + public static BlockFactory getNonBreakingInstance() { + return NON_BREAKING; + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java index b1b492b28076e..b7ea867f82cde 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryRequestTests.java @@ -31,6 +31,7 @@ import java.util.List; import java.util.Locale; import java.util.Map; +import java.util.function.Function; import java.util.function.Supplier; import static org.hamcrest.Matchers.containsString; @@ -55,7 +56,7 @@ public void testParseFields() throws IOException { "filter": %s %s""", query, columnar, locale.toLanguageTag(), filter, paramsString); - EsqlQueryRequest request = parseEsqlQueryRequest(json); + EsqlQueryRequest request = parseEsqlQueryRequestSync(json); assertEquals(query, request.query()); assertEquals(columnar, request.columnar()); @@ -69,6 +70,57 @@ public void testParseFields() throws IOException { } } + public void testParseFieldsForAsync() throws IOException { + String query = randomAlphaOfLengthBetween(1, 100); + boolean columnar = randomBoolean(); + Locale locale = randomLocale(random()); + QueryBuilder filter = randomQueryBuilder(); + + List params = randomParameters(); + boolean hasParams = params.isEmpty() == false; + StringBuilder paramsString = paramsString(params, hasParams); + boolean keepOnCompletion = randomBoolean(); + TimeValue waitForCompletion = TimeValue.parseTimeValue(randomTimeValue(), "test"); + TimeValue keepAlive = TimeValue.parseTimeValue(randomTimeValue(), "test"); + String json = String.format( + Locale.ROOT, + """ + { + "query": "%s", + "columnar": %s, + "locale": "%s", + "filter": %s, + "keep_on_completion": %s, + "wait_for_completion_timeout": "%s", + "keep_alive": "%s" + %s""", + query, + columnar, + locale.toLanguageTag(), + filter, + keepOnCompletion, + waitForCompletion.getStringRep(), + keepAlive.getStringRep(), + paramsString + ); + + EsqlQueryRequest request = parseEsqlQueryRequestAsync(json); + + assertEquals(query, request.query()); + assertEquals(columnar, request.columnar()); + assertEquals(locale.toLanguageTag(), request.locale().toLanguageTag()); + assertEquals(locale, request.locale()); + assertEquals(filter, request.filter()); + assertEquals(keepOnCompletion, request.keepOnCompletion()); + assertEquals(waitForCompletion, request.waitForCompletionTimeout()); + assertEquals(keepAlive, request.keepAlive()); + + assertEquals(params.size(), request.params().size()); + for (int i = 0; i < params.size(); i++) { + assertEquals(params.get(i), request.params().get(i)); + } + } + public void testRejectUnknownFields() { assertParserErrorMessage(""" { @@ -84,10 +136,15 @@ public void testRejectUnknownFields() { } public void testMissingQueryIsNotValidation() throws IOException { - EsqlQueryRequest request = parseEsqlQueryRequest(""" + String json = """ { "columnar": true - }"""); + }"""; + EsqlQueryRequest request = parseEsqlQueryRequestSync(json); + assertNotNull(request.validate()); + assertThat(request.validate().getMessage(), containsString("[query] is required")); + + request = parseEsqlQueryRequestAsync(json); assertNotNull(request.validate()); assertThat(request.validate().getMessage(), containsString("[query] is required")); } @@ -96,10 +153,12 @@ public void testTask() throws 
IOException { String query = randomAlphaOfLength(10); int id = randomInt(); - EsqlQueryRequest request = parseEsqlQueryRequest(""" + String requestJson = """ { "query": "QUERY" - }""".replace("QUERY", query)); + }""".replace("QUERY", query); + + EsqlQueryRequest request = parseEsqlQueryRequestSync(requestJson); Task task = request.createTask(id, "transport", EsqlQueryAction.NAME, TaskId.EMPTY_TASK_ID, Map.of()); assertThat(task.getDescription(), equalTo(query)); @@ -180,17 +239,29 @@ private StringBuilder paramsString(List params, boolean hasPara } private static void assertParserErrorMessage(String json, String message) { - Exception e = expectThrows(IllegalArgumentException.class, () -> parseEsqlQueryRequest(json)); + Exception e = expectThrows(IllegalArgumentException.class, () -> parseEsqlQueryRequestSync(json)); + assertThat(e.getMessage(), containsString(message)); + + e = expectThrows(IllegalArgumentException.class, () -> parseEsqlQueryRequestAsync(json)); assertThat(e.getMessage(), containsString(message)); } - private static EsqlQueryRequest parseEsqlQueryRequest(String json) throws IOException { + static EsqlQueryRequest parseEsqlQueryRequestSync(String json) throws IOException { + return parseEsqlQueryRequest(json, EsqlQueryRequest::fromXContentSync); + } + + static EsqlQueryRequest parseEsqlQueryRequestAsync(String json) throws IOException { + return parseEsqlQueryRequest(json, EsqlQueryRequest::fromXContentAsync); + } + + static EsqlQueryRequest parseEsqlQueryRequest(String json, Function fromXContentFunc) + throws IOException { SearchModule searchModule = new SearchModule(Settings.EMPTY, Collections.emptyList()); XContentParserConfiguration config = XContentParserConfiguration.EMPTY.withRegistry( new NamedXContentRegistry(searchModule.getNamedXContents()) ); try (XContentParser parser = XContentType.JSON.xContent().createParser(config, json)) { - return EsqlQueryRequest.fromXContent(parser); + return fromXContentFunc.apply(parser); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java index 25083268a3761..debcb5345bfa9 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/action/EsqlQueryResponseTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; -import org.elasticsearch.compute.data.IntArrayVector; import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.LongBlock; import org.elasticsearch.compute.data.Page; @@ -32,11 +31,17 @@ import org.elasticsearch.compute.operator.AbstractPageMappingOperator; import org.elasticsearch.compute.operator.DriverProfile; import org.elasticsearch.compute.operator.DriverStatus; +import org.elasticsearch.core.Nullable; import org.elasticsearch.core.Releasables; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; +import org.elasticsearch.xcontent.InstantiatingObjectParser; +import org.elasticsearch.xcontent.ObjectParser; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ParserConstructor; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xcontent.json.JsonXContent; +import 
org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; import org.elasticsearch.xpack.ql.type.DataType; @@ -51,6 +56,9 @@ import java.util.List; import java.util.stream.Stream; +import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.xpack.esql.action.ResponseValueUtils.valuesToPage; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; import static org.hamcrest.Matchers.equalTo; @@ -89,11 +97,21 @@ protected EsqlQueryResponse createTestInstance() { } EsqlQueryResponse randomResponse(boolean columnar, EsqlQueryResponse.Profile profile) { + return randomResponseAsync(columnar, profile, false); + } + + EsqlQueryResponse randomResponseAsync(boolean columnar, EsqlQueryResponse.Profile profile, boolean async) { int noCols = randomIntBetween(1, 10); List columns = randomList(noCols, noCols, this::randomColumnInfo); int noPages = randomIntBetween(1, 20); List values = randomList(noPages, noPages, () -> randomPage(columns)); - return new EsqlQueryResponse(columns, values, profile, columnar); + String id = null; + boolean isRunning = false; + if (async) { + id = randomAlphaOfLengthBetween(1, 16); + isRunning = randomBoolean(); + } + return new EsqlQueryResponse(columns, values, profile, columnar, id, isRunning, async); } private ColumnInfo randomColumnInfo() { @@ -167,19 +185,21 @@ protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) { List cols = new ArrayList<>(instance.columns()); // keep the type the same so the values are still valid but change the name cols.set(mutCol, new ColumnInfo(cols.get(mutCol).name() + "mut", cols.get(mutCol).type())); - yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.profile(), instance.columnar()); + yield new EsqlQueryResponse(cols, deepCopyOfPages(instance), instance.profile(), instance.columnar(), instance.isAsync()); } case 1 -> new EsqlQueryResponse( instance.columns(), deepCopyOfPages(instance), instance.profile(), - false == instance.columnar() + false == instance.columnar(), + instance.isAsync() ); case 2 -> new EsqlQueryResponse( instance.columns(), deepCopyOfPages(instance), randomValueOtherThan(instance.profile(), this::randomProfile), - instance.columnar() + instance.columnar(), + instance.isAsync() ); case 3 -> { int noPages = instance.pages().size(); @@ -188,7 +208,13 @@ protected EsqlQueryResponse mutateInstance(EsqlQueryResponse instance) { differentPages.forEach(p -> Releasables.closeExpectNoException(p::releaseBlocks)); differentPages = randomList(noPages, noPages, () -> randomPage(instance.columns())); } while (differentPages.equals(instance.pages())); - yield new EsqlQueryResponse(instance.columns(), differentPages, instance.profile(), instance.columnar()); + yield new EsqlQueryResponse( + instance.columns(), + differentPages, + instance.profile(), + instance.columnar(), + instance.isAsync() + ); } default -> throw new IllegalArgumentException(); }; @@ -214,7 +240,58 @@ protected Writeable.Reader instanceReader() { @Override protected EsqlQueryResponse doParseInstance(XContentParser parser) { - return EsqlQueryResponse.fromXContent(parser); + return ResponseBuilder.fromXContent(parser); + } + + public static class ResponseBuilder { + private static final 
ParseField ID = new ParseField("id"); + private static final ParseField IS_RUNNING = new ParseField("is_running"); + private static final InstantiatingObjectParser<ResponseBuilder, Void> PARSER; + + static { + InstantiatingObjectParser.Builder<ResponseBuilder, Void> parser = InstantiatingObjectParser.builder( + "esql/query_response", + true, + ResponseBuilder.class + ); + parser.declareString(optionalConstructorArg(), ID); + parser.declareField( + optionalConstructorArg(), + p -> p.currentToken() == XContentParser.Token.VALUE_NULL ? false : p.booleanValue(), + IS_RUNNING, + ObjectParser.ValueType.BOOLEAN_OR_NULL + ); + parser.declareObjectArray(constructorArg(), (p, c) -> ColumnInfo.fromXContent(p), new ParseField("columns")); + parser.declareField(constructorArg(), (p, c) -> p.list(), new ParseField("values"), ObjectParser.ValueType.OBJECT_ARRAY); + PARSER = parser.build(); + } + + // Used for XContent reconstruction + private final EsqlQueryResponse response; + + @ParserConstructor + public ResponseBuilder(@Nullable String asyncExecutionId, Boolean isRunning, List<ColumnInfo> columns, List<List<Object>> values) { + this.response = new EsqlQueryResponse( + columns, + List.of(valuesToPage(TestBlockFactory.getNonBreakingInstance(), columns, values)), + null, + false, + asyncExecutionId, + isRunning != null, + isAsync(asyncExecutionId, isRunning) + ); + } + + static boolean isAsync(@Nullable String asyncExecutionId, Boolean isRunning) { + if (asyncExecutionId != null || isRunning != null) { + return true; + } + return false; + } + + static EsqlQueryResponse fromXContent(XContentParser parser) { + return PARSER.apply(parser, null).response; + } } public void testChunkResponseSizeColumnar() { @@ -223,6 +300,12 @@ public void testChunkResponseSizeColumnar() { int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; assertChunkCount(resp, r -> 5 + bodySize); } + + try (EsqlQueryResponse resp = randomResponseAsync(true, null, true)) { + int columnCount = resp.pages().get(0).getBlockCount(); + int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount() * p.getBlockCount()).sum() + columnCount * 2; + assertChunkCount(resp, r -> 6 + bodySize); // is_running + } } public void testChunkResponseSizeRows() { @@ -230,6 +313,10 @@ public void testChunkResponseSizeRows() { int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); assertChunkCount(resp, r -> 5 + bodySize); } + try (EsqlQueryResponse resp = randomResponseAsync(false, null, true)) { + int bodySize = resp.pages().stream().mapToInt(p -> p.getPositionCount()).sum(); + assertChunkCount(resp, r -> 6 + bodySize); + } } public void testSimpleXContentColumnar() { @@ -239,6 +326,13 @@ public void testSimpleXContentColumnar() { } } + public void testSimpleXContentColumnarAsync() { + try (EsqlQueryResponse response = simple(true, true)) { + assertThat(Strings.toString(response), equalTo(""" + {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40,80]]}""")); + } + } + public void testSimpleXContentRows() { try (EsqlQueryResponse response = simple(false)) { assertThat(Strings.toString(response), equalTo(""" @@ -246,12 +340,41 @@ public void testSimpleXContentRows() { } } + public void testSimpleXContentRowsAsync() { + try (EsqlQueryResponse response = simple(false, true)) { + assertThat(Strings.toString(response), equalTo(""" + {"is_running":false,"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); + } + } + + public void testBasicXContentIdAndRunning() { + try ( + EsqlQueryResponse
response = new EsqlQueryResponse( + List.of(new ColumnInfo("foo", "integer")), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), + null, + false, + "id-123", + true, + true + ) + ) { + assertThat(Strings.toString(response), equalTo(""" + {"id":"id-123","is_running":true,"columns":[{"name":"foo","type":"integer"}],"values":[[40],[80]]}""")); + } + } + private EsqlQueryResponse simple(boolean columnar) { + return simple(columnar, false); + } + + private EsqlQueryResponse simple(boolean columnar, boolean async) { return new EsqlQueryResponse( List.of(new ColumnInfo("foo", "integer")), - List.of(new Page(new IntArrayVector(new int[] { 40, 80 }, 2).asBlock())), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), null, - columnar + columnar, + async ); } @@ -259,10 +382,11 @@ public void testProfileXContent() { try ( EsqlQueryResponse response = new EsqlQueryResponse( List.of(new ColumnInfo("foo", "integer")), - List.of(new Page(new IntArrayVector(new int[] { 40, 80 }, 2).asBlock())), + List.of(new Page(blockFactory.newIntArrayVector(new int[] { 40, 80 }, 2).asBlock())), new EsqlQueryResponse.Profile( List.of(new DriverProfile(List.of(new DriverStatus.OperatorStatus("asdf", new AbstractPageMappingOperator.Status(10))))) ), + false, false ); ) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java index 93456ff30c4cd..90e45a0a8b5a7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/analysis/AnalyzerTests.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.esql.enrich.EnrichPolicyResolution; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; import org.elasticsearch.xpack.esql.expression.function.aggregate.Max; +import org.elasticsearch.xpack.esql.parser.ParsingException; import org.elasticsearch.xpack.esql.plan.logical.EsqlUnresolvedRelation; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.Row; @@ -266,11 +267,68 @@ public void testNoProjection() { ); } - public void testProjectOrder() { + public void testDuplicateProjections() { + assertProjection(""" + from test + | keep first_name, first_name + """, "first_name"); + assertProjection(""" + from test + | keep first_name, first_name, last_name, first_name + """, "last_name", "first_name"); + } + + public void testProjectWildcard() { assertProjection(""" from test | keep first_name, *, last_name """, "first_name", "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary", "last_name"); + assertProjection(""" + from test + | keep first_name, last_name, * + """, "first_name", "last_name", "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary"); + assertProjection(""" + from test + | keep *, first_name, last_name + """, "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary", "first_name", "last_name"); + + var e = expectThrows(ParsingException.class, () -> analyze(""" + from test + | keep *, first_name, last_name, * + """)); + assertThat(e.getMessage(), containsString("Cannot specify [*] more than once")); + + } + + public void testProjectMixedWildcard() { + assertProjection(""" + from test + | keep *name, first* + """, "last_name", 
"first_name"); + assertProjection(""" + from test + | keep first_name, *name, first* + """, "first_name", "last_name"); + assertProjection(""" + from test + | keep *ob*, first_name, *name, first* + """, "job", "job.raw", "first_name", "last_name"); + assertProjection(""" + from test + | keep first_name, *, *name + """, "first_name", "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary", "last_name"); + assertProjection(""" + from test + | keep first*, *, last_name, first_name + """, "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary", "last_name", "first_name"); + assertProjection(""" + from test + | keep first*, *, last_name, fir* + """, "_meta_field", "emp_no", "gender", "job", "job.raw", "languages", "long_noidx", "salary", "last_name", "first_name"); + assertProjection(""" + from test + | keep *, job* + """, "_meta_field", "emp_no", "first_name", "gender", "languages", "last_name", "long_noidx", "salary", "job", "job.raw"); } public void testProjectThenDropName() { @@ -1429,9 +1487,6 @@ public void testMissingAttributeException_InChainedEval() { public void testUnresolvedMvExpand() { var e = expectThrows(VerificationException.class, () -> analyze("row foo = 1 | mv_expand bar")); assertThat(e.getMessage(), containsString("Unknown column [bar]")); - - e = expectThrows(VerificationException.class, () -> analyze("row foo = 1 | keep foo, foo | mv_expand foo")); - assertThat(e.getMessage(), containsString("Reference [foo] is ambiguous (to disambiguate use quotes or qualifiers)")); } private void verifyUnsupported(String query, String errorMessage) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 79a7ed2a09e2c..fb2482d50173b 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -32,6 +32,7 @@ import org.elasticsearch.logging.LogManager; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.json.JsonXContent; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.expression.function.scalar.conditional.Greatest; import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; @@ -40,6 +41,7 @@ import org.elasticsearch.xpack.esql.planner.Layout; import org.elasticsearch.xpack.esql.planner.PlannerUtils; import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Attribute; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.FieldAttribute; import org.elasticsearch.xpack.ql.expression.Literal; @@ -150,11 +152,17 @@ protected static Iterable parameterSuppliersFromTypedData(List values) { - return new Page(BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), values)); + return new Page(BlockUtils.fromListRow(TestBlockFactory.getNonBreakingInstance(), values)); } /** * Hack together a layout by scanning for Fields. * Those will show up in the layout in whatever order a depth first traversal finds them. 
*/ - protected void buildLayout(Layout.Builder builder, Expression e) { + protected static void buildLayout(Layout.Builder builder, Expression e) { if (e instanceof FieldAttribute f) { builder.append(f); return; @@ -239,7 +251,8 @@ private void testEvaluate(boolean readFloating) { assertFalse("expected resolved", expression.typeResolved().unresolved()); expression = new FoldNull().rule(expression); assertThat(expression.dataType(), equalTo(testCase.expectedType)); - // TODO should we convert unsigned_long into BigDecimal so it's easier to assert? + logger.info("Result type: " + expression.dataType()); + Object result; try (ExpressionEvaluator evaluator = evaluator(expression).get(driverContext())) { try (Block block = evaluator.eval(row(testCase.getDataValues()))) { @@ -433,13 +446,14 @@ public final void testSimpleWithNulls() { // TODO replace this with nulls insert assumeTrue("All test data types must be representable in order to build fields", testCase.allTypesAreRepresentable()); List simpleData = testCase.getDataValues(); try (EvalOperator.ExpressionEvaluator eval = evaluator(buildFieldExpression(testCase)).get(driverContext())) { - Block[] orig = BlockUtils.fromListRow(BlockFactory.getNonBreakingInstance(), simpleData); + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + Block[] orig = BlockUtils.fromListRow(blockFactory, simpleData); for (int i = 0; i < orig.length; i++) { List data = new ArrayList<>(); Block[] blocks = new Block[orig.length]; for (int b = 0; b < blocks.length; b++) { if (b == i) { - blocks[b] = orig[b].elementType().newBlockBuilder(1).appendNull().build(); + blocks[b] = orig[b].elementType().newBlockBuilder(1, blockFactory).appendNull().build(); data.add(null); } else { blocks[b] = orig[b]; @@ -741,6 +755,10 @@ protected static List failureForCasesWithoutExamples(List suppliers) { String bad = suppliers.stream().filter(s -> s.types() == null).map(s -> s.name()).collect(Collectors.joining("\n")); if (bad.equals("") == false) { @@ -886,6 +904,21 @@ private static String typeErrorMessage(boolean includeOrdinal, List forBinaryCastingToDouble( List warnings ) { List suppliers = new ArrayList<>(); + casesCrossProduct( + (l, r) -> expected.apply(((Number) l).doubleValue(), ((Number) r).doubleValue()), + lhsSuppliers, + rhsSuppliers, + (lhsType, rhsType) -> name + + "[" + + lhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=0]", lhsType) + + ", " + + rhsName + + "=" + + castToDoubleEvaluator("Attribute[channel=1]", rhsType) + + "]", + warnings, + suppliers, + DataTypes.DOUBLE + ); + return suppliers; + } + + private static void casesCrossProduct( + BinaryOperator expected, + List lhsSuppliers, + List rhsSuppliers, + BiFunction evaluatorToString, + List warnings, + List suppliers, + DataType expectedType + ) { for (TypedDataSupplier lhsSupplier : lhsSuppliers) { for (TypedDataSupplier rhsSupplier : rhsSuppliers) { String caseName = lhsSupplier.name() + ", " + rhsSupplier.name(); suppliers.add(new TestCaseSupplier(caseName, List.of(lhsSupplier.type(), rhsSupplier.type()), () -> { - Number lhs = (Number) lhsSupplier.supplier().get(); - Number rhs = (Number) rhsSupplier.supplier().get(); + Object lhs = lhsSupplier.supplier().get(); + Object rhs = rhsSupplier.supplier().get(); TypedData lhsTyped = new TypedData( // TODO there has to be a better way to handle unsigned long lhs instanceof BigInteger b ? 
NumericUtils.asLongUnsigned(b) : lhs, @@ -186,13 +219,11 @@ public static List forBinaryCastingToDouble( rhsSupplier.type(), "rhs" ); - String lhsEvalName = castToDoubleEvaluator("Attribute[channel=0]", lhsSupplier.type()); - String rhsEvalName = castToDoubleEvaluator("Attribute[channel=1]", rhsSupplier.type()); TestCase testCase = new TestCase( List.of(lhsTyped, rhsTyped), - name + "[" + lhsName + "=" + lhsEvalName + ", " + rhsName + "=" + rhsEvalName + "]", - DataTypes.DOUBLE, - equalTo(expected.apply(lhs.doubleValue(), rhs.doubleValue())) + evaluatorToString.apply(lhsSupplier.type(), rhsSupplier.type()), + expectedType, + equalTo(expected.apply(lhs, rhs)) ); for (String warning : warnings) { testCase = testCase.withWarning(warning); @@ -201,8 +232,6 @@ public static List forBinaryCastingToDouble( })); } } - - return suppliers; } public static List castToDoubleSuppliersFromRange(Double Min, Double Max) { @@ -214,6 +243,146 @@ public static List castToDoubleSuppliersFromRange(Double Min, return suppliers; } + public static List forBinaryNumericNotCasting( + String name, + String lhsName, + String rhsName, + BinaryOperator expected, + DataType expectedType, + List lhsSuppliers, + List rhsSuppliers, + List warnings, + boolean symetric + ) { + return forBinaryNotCasting( + name, + lhsName, + rhsName, + (lhs, rhs) -> expected.apply((Number) lhs, (Number) rhs), + expectedType, + lhsSuppliers, + rhsSuppliers, + warnings, + symetric + ); + } + + public record NumericTypeTestConfig(Number min, Number max, BinaryOperator expected, String evaluatorName) {} + + public record NumericTypeTestConfigs( + NumericTypeTestConfig intStuff, + NumericTypeTestConfig longStuff, + NumericTypeTestConfig doubleStuff + ) { + public NumericTypeTestConfig get(DataType type) { + if (type == DataTypes.INTEGER) { + return intStuff; + } + if (type == DataTypes.LONG) { + return longStuff; + } + if (type == DataTypes.DOUBLE) { + return doubleStuff; + } + throw new IllegalArgumentException("bogus numeric type [" + type + "]"); + } + } + + private static DataType widen(DataType lhs, DataType rhs) { + if (lhs == rhs) { + return lhs; + } + if (lhs == DataTypes.DOUBLE || rhs == DataTypes.DOUBLE) { + return DataTypes.DOUBLE; + } + if (lhs == DataTypes.LONG || rhs == DataTypes.LONG) { + return DataTypes.LONG; + } + throw new IllegalArgumentException("Invalid numeric widening lhs: [" + lhs + "] rhs: [" + rhs + "]"); + } + + private static List getSuppliersForNumericType(DataType type, Number min, Number max) { + if (type == DataTypes.INTEGER) { + return intCases(NumericUtils.saturatingIntValue(min), NumericUtils.saturatingIntValue(max)); + } + if (type == DataTypes.LONG) { + return longCases(min.longValue(), max.longValue()); + } + if (type == DataTypes.UNSIGNED_LONG) { + return ulongCases( + min instanceof BigInteger ? (BigInteger) min : BigInteger.valueOf(Math.max(min.longValue(), 0L)), + max instanceof BigInteger ? 
(BigInteger) max : BigInteger.valueOf(Math.max(max.longValue(), 0L)) + ); + } + if (type == DataTypes.DOUBLE) { + return doubleCases(min.doubleValue(), max.doubleValue()); + } + throw new IllegalArgumentException("bogus numeric type [" + type + "]"); + } + + public static List forBinaryWithWidening( + NumericTypeTestConfigs typeStuff, + String lhsName, + String rhsName, + List warnings + ) { + List suppliers = new ArrayList<>(); + List numericTypes = List.of(DataTypes.INTEGER, DataTypes.LONG, DataTypes.DOUBLE); + + for (DataType lhsType : numericTypes) { + for (DataType rhsType : numericTypes) { + DataType expected = widen(lhsType, rhsType); + NumericTypeTestConfig expectedTypeStuff = typeStuff.get(expected); + String evaluator = expectedTypeStuff.evaluatorName() + + "[" + + lhsName + + "=" + + getCastEvaluator("Attribute[channel=0]", lhsType, expected) + + ", " + + rhsName + + "=" + + getCastEvaluator("Attribute[channel=1]", rhsType, expected) + + "]"; + casesCrossProduct( + (l, r) -> expectedTypeStuff.expected().apply((Number) l, (Number) r), + getSuppliersForNumericType(lhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), + getSuppliersForNumericType(rhsType, expectedTypeStuff.min(), expectedTypeStuff.max()), + // TODO: This doesn't really need to be a function + (lt, rt) -> evaluator, + warnings, + suppliers, + expected + ); + } + } + + return suppliers; + } + + public static List forBinaryNotCasting( + String name, + String lhsName, + String rhsName, + BinaryOperator expected, + DataType expectedType, + List lhsSuppliers, + List rhsSuppliers, + List warnings, + boolean symetric + ) { + List suppliers = new ArrayList<>(); + casesCrossProduct( + expected, + lhsSuppliers, + rhsSuppliers, + (lhsType, rhsType) -> name + "[" + lhsName + "=Attribute[channel=0], " + rhsName + "=Attribute[channel=1]]", + warnings, + suppliers, + expectedType + ); + return suppliers; + } + /** * Generate positive test cases for a unary function operating on an {@link DataTypes#INTEGER}. 
*/ @@ -722,7 +891,7 @@ private static List booleanCases() { ); } - private static List dateCases() { + public static List dateCases() { return List.of( new TypedDataSupplier("<1970-01-01T00:00:00Z>", () -> 0L, DataTypes.DATETIME), new TypedDataSupplier( @@ -739,17 +908,43 @@ private static List dateCases() { ); } - private static List geoPointCases() { + public static List datePeriodCases() { + return List.of( + new TypedDataSupplier("", () -> Period.ZERO, EsqlDataTypes.DATE_PERIOD), + new TypedDataSupplier( + "", + () -> Period.of( + ESTestCase.randomIntBetween(-1000, 1000), + ESTestCase.randomIntBetween(-13, 13), + ESTestCase.randomIntBetween(-32, 32) + ), + EsqlDataTypes.DATE_PERIOD + ) + ); + } + + public static List timeDurationCases() { + return List.of( + new TypedDataSupplier("", () -> Duration.ZERO, EsqlDataTypes.TIME_DURATION), + new TypedDataSupplier( + "", + () -> Duration.ofMillis(ESTestCase.randomLongBetween(-604800000L, 604800000L)), // plus/minus 7 days + EsqlDataTypes.TIME_DURATION + ) + ); + } + + public static List geoPointCases() { return List.of(new TypedDataSupplier("", () -> GEO.pointAsLong(randomGeoPoint()), EsqlDataTypes.GEO_POINT)); } - private static List cartesianPointCases() { + public static List cartesianPointCases() { return List.of( new TypedDataSupplier("", () -> CARTESIAN.pointAsLong(randomCartesianPoint()), EsqlDataTypes.CARTESIAN_POINT) ); } - private static List ipCases() { + public static List ipCases() { return List.of( new TypedDataSupplier( "<127.0.0.1 ip>", @@ -811,6 +1006,54 @@ public static List versionCases(String prefix) { ); } + private static String getCastEvaluator(String original, DataType current, DataType target) { + if (current == target) { + return original; + } + if (target == DataTypes.LONG) { + return castToLongEvaluator(original, current); + } + if (target == DataTypes.UNSIGNED_LONG) { + return castToUnsignedLongEvaluator(original, current); + } + if (target == DataTypes.DOUBLE) { + return castToDoubleEvaluator(original, current); + } + throw new IllegalArgumentException("Invalid numeric cast to [" + target + "]"); + } + + private static String castToLongEvaluator(String original, DataType current) { + if (current == DataTypes.LONG) { + return original; + } + if (current == DataTypes.INTEGER) { + return "CastIntToLongEvaluator[v=" + original + "]"; + } + if (current == DataTypes.DOUBLE) { + return "CastDoubleToLongEvaluator[v=" + original + "]"; + } + if (current == DataTypes.UNSIGNED_LONG) { + return "CastUnsignedLongToLong[v=" + original + "]"; + } + throw new UnsupportedOperationException(); + } + + private static String castToUnsignedLongEvaluator(String original, DataType current) { + if (current == DataTypes.UNSIGNED_LONG) { + return original; + } + if (current == DataTypes.INTEGER) { + return "CastIntToUnsignedLongEvaluator[v=" + original + "]"; + } + if (current == DataTypes.LONG) { + return "CastLongToUnsignedLongEvaluator[v=" + original + "]"; + } + if (current == DataTypes.DOUBLE) { + return "CastDoubleToUnsignedLongEvaluator[v=" + original + "]"; + } + throw new UnsupportedOperationException(); + } + private static String castToDoubleEvaluator(String original, DataType current) { if (current == DataTypes.DOUBLE) { return original; diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java index 838044c8b90f6..90692d5b19df1 100644 --- 
a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/conditional/CaseTests.java @@ -12,8 +12,8 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; @@ -88,11 +88,15 @@ protected Expression build(Source source, List<Expression> args) { public void testEvalCase() { testCase(caseExpr -> { + DriverContext driverContext = driverContext(); + Page page = new Page(driverContext.blockFactory().newConstantIntBlockWith(0, 1)); try ( - EvalOperator.ExpressionEvaluator eval = caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext()); - Block block = eval.eval(new Page(IntBlock.newConstantBlockWith(0, 1))) + EvalOperator.ExpressionEvaluator eval = caseExpr.toEvaluator(child -> evaluator(child)).get(driverContext); + Block block = eval.eval(page) ) { return toJavaObject(block, 0); + } finally { + page.releaseBlocks(); } }); } @@ -148,7 +152,8 @@ public void testCaseWithIncompatibleTypes() { public void testCaseIsLazy() { Case caseExpr = caseExpr(true, 1, true, 2); - try (Block block = caseExpr.toEvaluator(child -> { + DriverContext driverContext = driverContext(); + EvalOperator.ExpressionEvaluator evaluator = caseExpr.toEvaluator(child -> { Object value = child.fold(); if (value != null && value.equals(2)) { return dvrCtx -> new EvalOperator.ExpressionEvaluator() { @@ -163,8 +168,12 @@ public void close() {} }; } return evaluator(child); - }).get(driverContext()).eval(new Page(IntBlock.newConstantBlockWith(0, 1)))) { + }).get(driverContext); + Page page = new Page(driverContext.blockFactory().newConstantIntBlockWith(0, 1)); + try (Block block = evaluator.eval(page)) { assertEquals(1, toJavaObject(block, 0)); + } finally { + page.releaseBlocks(); + } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java new file mode 100644 index 0000000000000..b0e9a79698f90 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToCartesianPointTests.java @@ -0,0 +1,99 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.CARTESIAN; + +public class ToCartesianPointTests extends AbstractFunctionTestCase { + public ToCartesianPointTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToCartesianPoint" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryCartesianPoint(suppliers, attribute, EsqlDataTypes.CARTESIAN_POINT, l -> l, List.of()); + TestCaseSupplier.forUnaryLong( + suppliers, + attribute, + EsqlDataTypes.CARTESIAN_POINT, + l -> l, + Long.MIN_VALUE, + Long.MAX_VALUE, + List.of() + ); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + attribute, + EsqlDataTypes.CARTESIAN_POINT, + NumericUtils::asLongUnsigned, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + + // random strings that don't look like a cartesian point + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.CARTESIAN_POINT, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> CARTESIAN.stringAsPoint(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are cartesian point representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(CARTESIAN.pointAsString(randomCartesianPoint())), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.CARTESIAN_POINT, + bytesRef -> CARTESIAN.pointAsLong(CARTESIAN.stringAsPoint(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToCartesianPoint(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java index ebcaf367b1226..0309bcce85581 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToDoubleTests.java @@ -54,17 +54,13 @@ public static Iterable parameters() { List.of() ); // random strings that don't look like a double - TestCaseSupplier.forUnaryStrings( - suppliers, - evaluatorName.apply("String"), - DataTypes.DOUBLE, - bytesRef -> null, - bytesRef -> List.of( + TestCaseSupplier.forUnaryStrings(suppliers, evaluatorName.apply("String"), DataTypes.DOUBLE, bytesRef -> null, bytesRef -> { + var exception = expectThrows(NumberFormatException.class, () -> Double.parseDouble(bytesRef.utf8ToString())); + return List.of( "Line -1:-1: evaluation of [] failed, treating result as null. Only first 20 failures recorded.", - "Line -1:-1: java.lang.NumberFormatException: " - + (bytesRef.utf8ToString().isEmpty() ? "empty String" : ("For input string: \"" + bytesRef.utf8ToString() + "\"")) - ) - ); + "Line -1:-1: " + exception + ); + }); TestCaseSupplier.forUnaryUnsignedLong( suppliers, evaluatorName.apply("UnsignedLong"), diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java new file mode 100644 index 0000000000000..6a8198ca12b4c --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/convert/ToGeoPointTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.esql.expression.function.scalar.convert; + +import com.carrotsearch.randomizedtesting.annotations.Name; +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; +import org.elasticsearch.xpack.esql.type.EsqlDataTypes; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.elasticsearch.xpack.ql.util.NumericUtils; + +import java.math.BigInteger; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Function; +import java.util.function.Supplier; + +import static org.elasticsearch.xpack.ql.util.SpatialCoordinateTypes.GEO; + +public class ToGeoPointTests extends AbstractFunctionTestCase { + public ToGeoPointTests(@Name("TestCase") Supplier testCaseSupplier) { + this.testCase = testCaseSupplier.get(); + } + + @ParametersFactory + public static Iterable parameters() { + // TODO multivalue fields + final String attribute = "Attribute[channel=0]"; + final Function evaluatorName = s -> "ToGeoPoint" + s + "Evaluator[field=" + attribute + "]"; + final List suppliers = new ArrayList<>(); + + TestCaseSupplier.forUnaryGeoPoint(suppliers, attribute, EsqlDataTypes.GEO_POINT, l -> l, List.of()); + TestCaseSupplier.forUnaryLong(suppliers, attribute, EsqlDataTypes.GEO_POINT, l -> l, Long.MIN_VALUE, Long.MAX_VALUE, List.of()); + TestCaseSupplier.forUnaryUnsignedLong( + suppliers, + attribute, + EsqlDataTypes.GEO_POINT, + NumericUtils::asLongUnsigned, + BigInteger.ZERO, + UNSIGNED_LONG_MAX, + List.of() + ); + + // random strings that don't look like a geo point + TestCaseSupplier.forUnaryStrings( + suppliers, + evaluatorName.apply("FromString"), + EsqlDataTypes.GEO_POINT, + bytesRef -> null, + bytesRef -> { + var exception = expectThrows(Exception.class, () -> GEO.stringAsPoint(bytesRef.utf8ToString())); + return List.of( + "Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.", + "Line -1:-1: " + exception + ); + } + ); + // strings that are geo point representations + TestCaseSupplier.unary( + suppliers, + evaluatorName.apply("FromString"), + List.of( + new TestCaseSupplier.TypedDataSupplier( + "", + () -> new BytesRef(GEO.pointAsString(randomGeoPoint())), + DataTypes.KEYWORD + ) + ), + EsqlDataTypes.GEO_POINT, + bytesRef -> GEO.pointAsLong(GEO.stringAsPoint(((BytesRef) bytesRef).utf8ToString())), + List.of() + ); + + return parameterSuppliersFromTypedData(errorsForCasesWithoutExamples(anyNullIsNull(true, suppliers))); + } + + @Override + protected Expression build(Source source, List args) { + return new ToGeoPoint(source, args.get(0)); + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java index abaa382637882..9854dfbe11460 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/scalar/string/SplitTests.java @@ -12,8 +12,9 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.xpack.esql.expression.function.TestCaseSupplier; import org.elasticsearch.xpack.esql.expression.function.scalar.AbstractScalarFunctionTestCase; @@ -84,10 +85,11 @@ protected Expression build(Source source, List args) { } public void testConstantDelimiter() { + DriverContext driverContext = driverContext(); try ( EvalOperator.ExpressionEvaluator eval = evaluator( new Split(Source.EMPTY, field("str", DataTypes.KEYWORD), new Literal(Source.EMPTY, new BytesRef(":"), DataTypes.KEYWORD)) - ).get(driverContext()) + ).get(driverContext) ) { /* * 58 is ascii for : and appears in the toString below. We don't convert the delimiter to a @@ -96,8 +98,12 @@ public void testConstantDelimiter() { */ assert ':' == 58; assertThat(eval.toString(), equalTo("SplitSingleByteEvaluator[str=Attribute[channel=0], delim=58]")); - try (Block block = eval.eval(new Page(BytesRefBlock.newConstantBlockWith(new BytesRef("foo:bar"), 1)))) { + BlockFactory blockFactory = driverContext.blockFactory(); + Page page = new Page(blockFactory.newConstantBytesRefBlockWith(new BytesRef("foo:bar"), 1)); + try (Block block = eval.eval(page)) { assertThat(toJavaObject(block, 0), equalTo(List.of(new BytesRef("foo"), new BytesRef("bar")))); + } finally { + page.releaseBlocks(); } } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java new file mode 100644 index 0000000000000..a09cb68c893e0 --- /dev/null +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/BreakerTests.java @@ -0,0 +1,92 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.esql.expression.predicate.operator; + +import com.carrotsearch.randomizedtesting.annotations.ParametersFactory; + +import org.elasticsearch.common.breaker.CircuitBreaker; +import org.elasticsearch.common.breaker.CircuitBreakingException; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; +import org.elasticsearch.common.util.MockBigArrays; +import org.elasticsearch.common.util.PageCacheRecycler; +import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.Page; +import org.elasticsearch.compute.operator.DriverContext; +import org.elasticsearch.compute.operator.EvalOperator; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.expression.function.AbstractFunctionTestCase; +import org.elasticsearch.xpack.esql.expression.predicate.operator.arithmetic.Div; +import org.elasticsearch.xpack.ql.expression.Expression; +import org.elasticsearch.xpack.ql.expression.Literal; +import org.elasticsearch.xpack.ql.tree.Source; +import org.elasticsearch.xpack.ql.type.DataTypes; +import org.junit.After; + +import java.util.ArrayList; +import java.util.List; + +import static org.hamcrest.Matchers.equalTo; + +public class BreakerTests extends ESTestCase { + @ParametersFactory + public static Iterable<Object[]> parameters() { + List<Object[]> params = new ArrayList<>(); + + Expression expression = new Div( + Source.synthetic("[1] / (long) 2"), + AbstractFunctionTestCase.field("f", DataTypes.LONG), + new Literal(Source.EMPTY, 2, DataTypes.INTEGER) + ); + for (int b = 0; b < 136; b++) { + params.add(new Object[] { ByteSizeValue.ofBytes(b), expression }); + } + return params; + } + + private final List<CircuitBreaker> breakers = new ArrayList<>(); + + private final ByteSizeValue limit; + private final Expression expression; + + public BreakerTests(ByteSizeValue limit, Expression expression) { + this.limit = limit; + this.expression = expression; + } + + public void testBreaker() { + DriverContext unlimited = driverContext(ByteSizeValue.ofGb(1)); + DriverContext context = driverContext(limit); + EvalOperator.ExpressionEvaluator eval = AbstractFunctionTestCase.evaluator(expression).get(context); + try (Block b = unlimited.blockFactory().newConstantNullBlock(1)) { + Exception e = expectThrows(CircuitBreakingException.class, () -> eval.eval(new Page(b))); + assertThat(e.getMessage(), equalTo("over test limit")); + } + } + + /** + * A {@link DriverContext} with a {@link CircuitBreaker} limited to {@code limit} bytes.
+ */ + private DriverContext driverContext(ByteSizeValue limit) { + BigArrays bigArrays = new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, limit).withCircuitBreaking(); + CircuitBreaker breaker = bigArrays.breakerService().getBreaker(CircuitBreaker.REQUEST); + breakers.add(breaker); + return new DriverContext(bigArrays, new BlockFactory(breaker, bigArrays)); + } + + @After + public void allBreakersEmpty() throws Exception { + // first check that all big arrays are released, which can affect breakers + MockBigArrays.ensureAllArraysAreReleased(); + + for (CircuitBreaker breaker : breakers) { + assertThat("Unexpected used in breaker: " + breaker, breaker.getUsed(), equalTo(0L)); + } + } +} diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java index cc52a965e70b6..91f5a80076626 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/predicate/operator/arithmetic/AddTests.java @@ -21,6 +21,7 @@ import java.time.Duration; import java.time.Period; import java.time.temporal.TemporalAmount; +import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; @@ -31,7 +32,6 @@ import static org.elasticsearch.xpack.ql.type.DateUtils.asMillis; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.elasticsearch.xpack.ql.util.NumericUtils.unsignedLongAsBigInteger; -import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; @@ -42,131 +42,138 @@ public AddTests(@Name("TestCase") Supplier testCaseSu @ParametersFactory public static Iterable parameters() { - return parameterSuppliersFromTypedData(List.of(new TestCaseSupplier("Int + Int", () -> { - // Ensure we don't have an overflow - int rhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - int lhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.INTEGER, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.INTEGER, "rhs") - ), - "AddIntsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.INTEGER, - equalTo(lhs + rhs) - ); - }), new TestCaseSupplier("Long + Long", () -> { - // Ensure we don't have an overflow - long rhs = randomLongBetween((Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1); - long lhs = randomLongBetween((Long.MIN_VALUE >> 1) - 1, (Long.MAX_VALUE >> 1) - 1); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.LONG, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.LONG, "rhs") - ), - "AddLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.LONG, - equalTo(lhs + rhs) - ); - }), new TestCaseSupplier("Double + Double", () -> { - double rhs = randomDouble(); - double lhs = randomDouble(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DOUBLE, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.DOUBLE, "rhs") - ), - "AddDoublesEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - DataTypes.DOUBLE, - equalTo(lhs + rhs) - ); - 
})/*, new TestCaseSupplier("ULong + ULong", () -> { - // Ensure we don't have an overflow - // TODO: we should be able to test values over Long.MAX_VALUE too... - long rhs = randomLongBetween(0, (Long.MAX_VALUE >> 1) - 1); - long lhs = randomLongBetween(0, (Long.MAX_VALUE >> 1) - 1); - BigInteger lhsBI = unsignedLongAsBigInteger(lhs); - BigInteger rhsBI = unsignedLongAsBigInteger(rhs); - return new TestCase( - Source.EMPTY, - List.of(new TypedData(lhs, DataTypes.UNSIGNED_LONG, "lhs"), new TypedData(rhs, DataTypes.UNSIGNED_LONG, "rhs")), - "AddUnsignedLongsEvaluator[lhs=Attribute[channel=0], rhs=Attribute[channel=1]]", - equalTo(asLongUnsigned(lhsBI.add(rhsBI).longValue())) - ); - }) */, new TestCaseSupplier("Datetime + Period", () -> { - long lhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - Period rhs = (Period) randomLiteral(EsqlDataTypes.DATE_PERIOD).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, EsqlDataTypes.DATE_PERIOD, "rhs") + List suppliers = new ArrayList<>(); + suppliers.addAll( + TestCaseSupplier.forBinaryWithWidening( + new TestCaseSupplier.NumericTypeTestConfigs( + new TestCaseSupplier.NumericTypeTestConfig( + (Integer.MIN_VALUE >> 1) - 1, + (Integer.MAX_VALUE >> 1) - 1, + (l, r) -> l.intValue() + r.intValue(), + "AddIntsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig( + (Long.MIN_VALUE >> 1) - 1, + (Long.MAX_VALUE >> 1) - 1, + (l, r) -> l.longValue() + r.longValue(), + "AddLongsEvaluator" + ), + new TestCaseSupplier.NumericTypeTestConfig( + Double.NEGATIVE_INFINITY, + Double.POSITIVE_INFINITY, + (l, r) -> l.doubleValue() + r.doubleValue(), + "AddDoublesEvaluator" + ) ), + "lhs", + "rhs", + List.of() + ) + ); + + // Unsigned Long cases + // TODO: These should be integrated into the type cross product above, but are currently broken + // see https://github.com/elastic/elasticsearch/issues/102935 + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + "AddUnsignedLongsEvaluator", + "lhs", + "rhs", + (l, r) -> (((BigInteger) l).add((BigInteger) r)), + DataTypes.UNSIGNED_LONG, + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), + TestCaseSupplier.ulongCases(BigInteger.ZERO, BigInteger.valueOf(Long.MAX_VALUE)), + List.of(), + false + ) + ); + + // AwaitsFix https://github.com/elastic/elasticsearch/issues/103085 + // After fixing that issue, please move this line to below where the date cases are generated + suppliers = anyNullIsNull(true, suppliers); + + // Datetime Cases + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( // TODO: There is an evaluator for Datetime + Period, so it should be tested. Similarly below. 
"No evaluator, the tests only trigger the folding code since Period is not representable", + "lhs", + "rhs", + (lhs, rhs) -> { + // this weird casting dance makes the expected value lambda symmetric + Long date; + Period period; + if (lhs instanceof Long) { + date = (Long) lhs; + period = (Period) rhs; + } else { + date = (Long) rhs; + period = (Period) lhs; + } + return asMillis(asDateTime(date).plus(period)); + }, DataTypes.DATETIME, - equalTo(asMillis(asDateTime(lhs).plus(rhs))) - ); - }), new TestCaseSupplier("Period + Datetime", () -> { - Period lhs = (Period) randomLiteral(EsqlDataTypes.DATE_PERIOD).value(); - long rhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, EsqlDataTypes.DATE_PERIOD, "lhs"), - new TestCaseSupplier.TypedData(rhs, DataTypes.DATETIME, "rhs") - ), + TestCaseSupplier.dateCases(), + TestCaseSupplier.datePeriodCases(), + List.of(), + true + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( "No evaluator, the tests only trigger the folding code since Period is not representable", - DataTypes.DATETIME, - equalTo(asMillis(asDateTime(rhs).plus(lhs))) - ); - }), new TestCaseSupplier("Period + Period", () -> { - Period lhs = (Period) randomLiteral(EsqlDataTypes.DATE_PERIOD).value(); - Period rhs = (Period) randomLiteral(EsqlDataTypes.DATE_PERIOD).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, EsqlDataTypes.DATE_PERIOD, "lhs"), - new TestCaseSupplier.TypedData(rhs, EsqlDataTypes.DATE_PERIOD, "rhs") - ), - "Only folding possible, so there's no evaluator", + "lhs", + "rhs", + (lhs, rhs) -> ((Period) lhs).plus((Period) rhs), EsqlDataTypes.DATE_PERIOD, - equalTo(lhs.plus(rhs)) - ); - }), new TestCaseSupplier("Datetime + Duration", () -> { - long lhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - Duration rhs = (Duration) randomLiteral(EsqlDataTypes.TIME_DURATION).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, EsqlDataTypes.TIME_DURATION, "rhs") - ), + TestCaseSupplier.datePeriodCases(), + TestCaseSupplier.datePeriodCases(), + List.of(), + false + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( + // TODO: There is an evaluator for Datetime + Duration, so it should be tested. Similarly below. 
"No evaluator, the tests only trigger the folding code since Duration is not representable", + "lhs", + "rhs", + (lhs, rhs) -> { + // this weird casting dance makes the expected value lambda symmetric + Long date; + Duration duration; + if (lhs instanceof Long) { + date = (Long) lhs; + duration = (Duration) rhs; + } else { + date = (Long) rhs; + duration = (Duration) lhs; + } + return asMillis(asDateTime(date).plus(duration)); + }, DataTypes.DATETIME, - equalTo(asMillis(asDateTime(lhs).plus(rhs))) - ); - }), new TestCaseSupplier("Duration + Datetime", () -> { - long lhs = (Long) randomLiteral(DataTypes.DATETIME).value(); - Duration rhs = (Duration) randomLiteral(EsqlDataTypes.TIME_DURATION).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, DataTypes.DATETIME, "lhs"), - new TestCaseSupplier.TypedData(rhs, EsqlDataTypes.TIME_DURATION, "rhs") - ), + TestCaseSupplier.dateCases(), + TestCaseSupplier.timeDurationCases(), + List.of(), + true + ) + ); + suppliers.addAll( + TestCaseSupplier.forBinaryNotCasting( "No evaluator, the tests only trigger the folding code since Duration is not representable", - DataTypes.DATETIME, - equalTo(asMillis(asDateTime(lhs).plus(rhs))) - ); - }), new TestCaseSupplier("Duration + Duration", () -> { - Duration lhs = (Duration) randomLiteral(EsqlDataTypes.TIME_DURATION).value(); - Duration rhs = (Duration) randomLiteral(EsqlDataTypes.TIME_DURATION).value(); - return new TestCaseSupplier.TestCase( - List.of( - new TestCaseSupplier.TypedData(lhs, EsqlDataTypes.TIME_DURATION, "lhs"), - new TestCaseSupplier.TypedData(rhs, EsqlDataTypes.TIME_DURATION, "rhs") - ), - "Only folding possible, so there's no evaluator", + "lhs", + "rhs", + (lhs, rhs) -> ((Duration) lhs).plus((Duration) rhs), EsqlDataTypes.TIME_DURATION, - equalTo(lhs.plus(rhs)) - ); - }), new TestCaseSupplier("MV", () -> { + TestCaseSupplier.timeDurationCases(), + TestCaseSupplier.timeDurationCases(), + List.of(), + false + ) + ); + + // Cases that should generate warnings + suppliers.addAll(List.of(new TestCaseSupplier("MV", () -> { // Ensure we don't have an overflow int rhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); int lhs = randomIntBetween((Integer.MIN_VALUE >> 1) - 1, (Integer.MAX_VALUE >> 1) - 1); @@ -182,6 +189,8 @@ public static Iterable parameters() { ).withWarning("Line -1:-1: evaluation of [] failed, treating result as null. 
Only first 20 failures recorded.") .withWarning("Line -1:-1: java.lang.IllegalArgumentException: single-value function encountered multi-value"); }))); + + return parameterSuppliersFromTypedData(suppliers); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java index 9430e984039fe..545a2893270b7 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatTests.java @@ -8,14 +8,13 @@ package org.elasticsearch.xpack.esql.formatter; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.IntArrayVector; -import org.elasticsearch.compute.data.LongArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.xcontent.NamedXContentRegistry; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; import org.elasticsearch.xpack.ql.util.StringUtils; @@ -39,6 +38,8 @@ public class TextFormatTests extends ESTestCase { + static final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + public void testCsvContentType() { assertEquals("text/csv; charset=utf-8; header=present", CSV.contentType(req())); } @@ -231,15 +232,16 @@ public void testPlainTextEmptyCursorWithColumns() { public void testPlainTextEmptyCursorWithoutColumns() { assertEquals( StringUtils.EMPTY, - getTextBodyContent(PLAIN_TEXT.format(req(), new EsqlQueryResponse(emptyList(), emptyList(), null, false))) + getTextBodyContent(PLAIN_TEXT.format(req(), new EsqlQueryResponse(emptyList(), emptyList(), null, false, false))) ); } private static EsqlQueryResponse emptyData() { - return new EsqlQueryResponse(singletonList(new ColumnInfo("name", "keyword")), emptyList(), null, false); + return new EsqlQueryResponse(singletonList(new ColumnInfo("name", "keyword")), emptyList(), null, false, false); } private static EsqlQueryResponse regularData() { + BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); // headers List headers = asList( new ColumnInfo("string", "keyword"), @@ -250,16 +252,16 @@ private static EsqlQueryResponse regularData() { // values List values = List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("Along The River Bank")) .appendBytesRef(new BytesRef("Mind Train")) .build(), - new IntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), - new LongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock() + blockFactory.newIntArrayVector(new int[] { 11 * 60 + 48, 4 * 60 + 40 }, 2).asBlock(), + blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock() ) ); - return new EsqlQueryResponse(headers, values, null, false); + return new EsqlQueryResponse(headers, values, null, false, false); } private static EsqlQueryResponse escapedData() { @@ -269,15 +271,18 @@ private static EsqlQueryResponse escapedData() { // values List values = List.of( new Page( 
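// The pattern behind this migration, as a sketch with placeholder values: blocks and
// vectors are now created through a BlockFactory (here the non-breaking test factory
// shown above) instead of the removed static constructors such as
// BytesRefBlock.newBlockBuilder and IntArrayVector.
BlockFactory factory = TestBlockFactory.getNonBreakingInstance();
BytesRefBlock titles = factory.newBytesRefBlockBuilder(2)
    .appendBytesRef(new BytesRef("a"))
    .appendBytesRef(new BytesRef("b"))
    .build();
IntBlock counts = factory.newIntArrayVector(new int[] { 1, 2 }, 2).asBlock();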
- BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("normal")).appendBytesRef(new BytesRef("commas")).build(), - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("normal")) + .appendBytesRef(new BytesRef("commas")) + .build(), + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("\"quo\"ted\",\n")) .appendBytesRef(new BytesRef("a,b,c,\n,d,e,\t\n")) .build() ) ); - return new EsqlQueryResponse(headers, values, null, false); + return new EsqlQueryResponse(headers, values, null, false, false); } private static RestRequest req() { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java index 22e532341d30b..2ad9449f12199 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/formatter/TextFormatterTests.java @@ -9,12 +9,10 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.Strings; -import org.elasticsearch.compute.data.Block; -import org.elasticsearch.compute.data.BytesRefBlock; -import org.elasticsearch.compute.data.DoubleArrayVector; -import org.elasticsearch.compute.data.LongArrayVector; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.Page; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.action.ColumnInfo; import org.elasticsearch.xpack.esql.action.EsqlQueryResponse; @@ -27,6 +25,9 @@ import static org.hamcrest.Matchers.arrayWithSize; public class TextFormatterTests extends ESTestCase { + + static BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + private final List columns = Arrays.asList( new ColumnInfo("foo", "keyword"), new ColumnInfo("bar", "long"), @@ -42,26 +43,27 @@ public class TextFormatterTests extends ESTestCase { columns, List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef("15charwidedata!")) .appendBytesRef(new BytesRef("dog")) .build(), - new LongArrayVector(new long[] { 1, 2 }, 2).asBlock(), - new DoubleArrayVector(new double[] { 6.888, 123124.888 }, 2).asBlock(), - Block.constantNullBlock(2), - new DoubleArrayVector(new double[] { 12, 9912 }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("rabbit")).appendBytesRef(new BytesRef("goat")).build(), - new LongArrayVector( + blockFactory.newLongArrayVector(new long[] { 1, 2 }, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 6.888, 123124.888 }, 2).asBlock(), + blockFactory.newConstantNullBlock(2), + blockFactory.newDoubleArrayVector(new double[] { 12, 9912 }, 2).asBlock(), + blockFactory.newBytesRefBlockBuilder(2).appendBytesRef(new BytesRef("rabbit")).appendBytesRef(new BytesRef("goat")).build(), + blockFactory.newLongArrayVector( new long[] { UTC_DATE_TIME_FORMATTER.parseMillis("1953-09-02T00:00:00.000Z"), UTC_DATE_TIME_FORMATTER.parseMillis("2000-03-15T21:34:37.443Z") }, 2 ).asBlock(), - new LongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), - Block.constantNullBlock(2) + blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), + blockFactory.newConstantNullBlock(2) ) ), null, + 
randomBoolean(), randomBoolean() ); @@ -108,23 +110,30 @@ public void testFormatWithoutHeader() { columns, List.of( new Page( - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("doggie")).appendBytesRef(new BytesRef("dog")).build(), - new LongArrayVector(new long[] { 4, 2 }, 2).asBlock(), - new DoubleArrayVector(new double[] { 1, 123124.888 }, 2).asBlock(), - Block.constantNullBlock(2), - new DoubleArrayVector(new double[] { 77.0, 9912.0 }, 2).asBlock(), - BytesRefBlock.newBlockBuilder(2).appendBytesRef(new BytesRef("wombat")).appendBytesRef(new BytesRef("goat")).build(), - new LongArrayVector( + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("doggie")) + .appendBytesRef(new BytesRef("dog")) + .build(), + blockFactory.newLongArrayVector(new long[] { 4, 2 }, 2).asBlock(), + blockFactory.newDoubleArrayVector(new double[] { 1, 123124.888 }, 2).asBlock(), + blockFactory.newConstantNullBlock(2), + blockFactory.newDoubleArrayVector(new double[] { 77.0, 9912.0 }, 2).asBlock(), + blockFactory.newBytesRefBlockBuilder(2) + .appendBytesRef(new BytesRef("wombat")) + .appendBytesRef(new BytesRef("goat")) + .build(), + blockFactory.newLongArrayVector( new long[] { UTC_DATE_TIME_FORMATTER.parseMillis("1955-01-21T01:02:03.342Z"), UTC_DATE_TIME_FORMATTER.parseMillis("2231-12-31T23:59:59.999Z") }, 2 ).asBlock(), - new LongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), - Block.constantNullBlock(2) + blockFactory.newLongArrayVector(new long[] { GEO.pointAsLong(12, 56), GEO.pointAsLong(-97, 26) }, 2).asBlock(), + blockFactory.newConstantNullBlock(2) ) ), null, + randomBoolean(), randomBoolean() ); @@ -157,13 +166,14 @@ public void testVeryLongPadding() { List.of(new ColumnInfo("foo", "keyword")), List.of( new Page( - BytesRefBlock.newBlockBuilder(2) + blockFactory.newBytesRefBlockBuilder(2) .appendBytesRef(new BytesRef(smallFieldContent)) .appendBytesRef(new BytesRef(largeFieldContent)) .build() ) ), null, + randomBoolean(), randomBoolean() ) ).format(false) diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java index bc46189e13827..ac2426f485fcc 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalLogicalPlanOptimizerTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.expression.function.EsqlFunctionRegistry; +import org.elasticsearch.xpack.esql.expression.function.scalar.nulls.Coalesce; import org.elasticsearch.xpack.esql.parser.EsqlParser; import org.elasticsearch.xpack.esql.plan.logical.Eval; import org.elasticsearch.xpack.esql.plan.logical.local.LocalRelation; @@ -21,9 +22,11 @@ import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.ReferenceAttribute; +import org.elasticsearch.xpack.ql.expression.predicate.nulls.IsNotNull; import org.elasticsearch.xpack.ql.index.EsIndex; import org.elasticsearch.xpack.ql.index.IndexResolution; import org.elasticsearch.xpack.ql.plan.logical.EsRelation; +import org.elasticsearch.xpack.ql.plan.logical.Filter; import 
org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.plan.logical.Project; @@ -35,6 +38,7 @@ import java.util.Map; import static org.elasticsearch.xpack.esql.EsqlTestUtils.L; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_SEARCH_STATS; import static org.elasticsearch.xpack.esql.EsqlTestUtils.TEST_VERIFIER; import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.esql.EsqlTestUtils.loadMapping; @@ -263,6 +267,38 @@ public void testMissingFieldInFilterNoProjection() { ); } + public void testIsNotNullOnCoalesce() { + var plan = localPlan(""" + from test + | where coalesce(emp_no, salary) is not null + """); + + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var inn = as(filter.condition(), IsNotNull.class); + var coalesce = as(inn.children().get(0), Coalesce.class); + assertThat(Expressions.names(coalesce.children()), contains("emp_no", "salary")); + var source = as(filter.child(), EsRelation.class); + } + + public void testIsNotNullOnExpression() { + var plan = localPlan(""" + from test + | eval x = emp_no + 1 + | where x is not null + """); + + var limit = as(plan, Limit.class); + var filter = as(limit.child(), Filter.class); + var inn = as(filter.condition(), IsNotNull.class); + assertThat(Expressions.names(inn.children()), contains("x")); + var eval = as(filter.child(), Eval.class); + filter = as(eval.child(), Filter.class); + inn = as(filter.condition(), IsNotNull.class); + assertThat(Expressions.names(inn.children()), contains("emp_no")); + var source = as(filter.child(), EsRelation.class); + } + private LocalRelation asEmptyRelation(Object o) { var empty = as(o, LocalRelation.class); assertThat(empty.supplier(), is(LocalSupplier.EMPTY)); @@ -285,6 +321,10 @@ private LogicalPlan localPlan(LogicalPlan plan, SearchStats searchStats) { return localPlan; } + private LogicalPlan localPlan(String query) { + return localPlan(plan(query), TEST_SEARCH_STATS); + } + @Override protected List filteredWarnings() { return withDefaultLimitWarning(super.filteredWarnings()); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java index 12b8185cbec5d..bb600690dbca4 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LocalPhysicalPlanOptimizerTests.java @@ -381,6 +381,28 @@ public boolean exists(String field) { assertThat(Expressions.names(localSource.output()), contains("count", "seen")); } + public void testIsNotNullPushdownFilter() { + var plan = plan("from test | where emp_no is not null"); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var query = as(exchange.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(500)); + var expected = QueryBuilders.existsQuery("emp_no"); + assertThat(query.query().toString(), is(expected.toString())); + } + + public void testIsNullPushdownFilter() { + var plan = plan("from test | where emp_no is null"); + + var limit = as(plan, LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var query = as(exchange.child(), EsQueryExec.class); + assertThat(query.limit().fold(), is(500)); + 
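// The pushdown expectation of the two tests above, as a standalone sketch built from
// the same QueryBuilders calls the diff uses: `is not null` maps to an exists query
// and `is null` to its mustNot complement.
QueryBuilder isNotNull = QueryBuilders.existsQuery("emp_no");
QueryBuilder isNull = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("emp_no"));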
var expected = QueryBuilders.boolQuery().mustNot(QueryBuilders.existsQuery("emp_no")); + assertThat(query.query().toString(), is(expected.toString())); + } + private QueryBuilder wrapWithSingleQuery(QueryBuilder inner, String fieldName, Source source) { return FilterTests.singleValueQuery(inner, fieldName, source); } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java index 352dccc046588..cbbb0ab32c5c8 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/LogicalPlanOptimizerTests.java @@ -9,9 +9,9 @@ import org.elasticsearch.common.lucene.BytesRefs; import org.elasticsearch.compute.aggregation.QuantileStates; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.analysis.Analyzer; import org.elasticsearch.xpack.esql.analysis.AnalyzerContext; import org.elasticsearch.xpack.esql.analysis.AnalyzerTestUtils; @@ -301,7 +301,7 @@ public void testMultipleCombineLimits() { var limitWithMinimum = randomIntBetween(0, numberOfLimits - 1); var fa = getFieldAttribute("a", INTEGER); - var relation = localSource(BlockFactory.getNonBreakingInstance(), singletonList(fa), singletonList(1)); + var relation = localSource(TestBlockFactory.getNonBreakingInstance(), singletonList(fa), singletonList(1)); LogicalPlan plan = relation; for (int i = 0; i < numberOfLimits; i++) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java index 1f2bde2526fab..b0d881755453e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/optimizer/PhysicalPlanOptimizerTests.java @@ -1892,6 +1892,110 @@ public boolean exists(String field) { assertThat(Expressions.names(localSourceExec.output()), contains("languages", "min", "seen")); } + /** + * Expects + * intermediate plan + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#6) AS c],FINAL,null] + * \_ExchangeExec[[count{r}#16, seen{r}#17],true] + * \_FragmentExec[filter=null, estimatedRowSize=0, fragment=[ + * Aggregate[[],[COUNT(emp_no{f}#6) AS c]] + * \_Filter[emp_no{f}#6 > 10[INTEGER]] + * \_EsRelation[test][_meta_field{f}#12, emp_no{f}#6, first_name{f}#7, ge..]]] + * + * and final plan is + * LimitExec[500[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#6) AS c],FINAL,8] + * \_ExchangeExec[[count{r}#16, seen{r}#17],true] + * \_LocalSourceExec[[count{r}#16, seen{r}#17],[LongVectorBlock[vector=ConstantLongVector[positions=1, value=0]]]] + */ + public void testPartialAggFoldingOutput() { + var plan = physicalPlan(""" + from test + | where emp_no > 10 + | stats c = count(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var limit = as(optimized, LimitExec.class); + var agg = as(limit.child(), AggregateExec.class); + var exchange = as(agg.child(), ExchangeExec.class); + assertThat(Expressions.names(exchange.output()), 
contains("count", "seen")); + var source = as(exchange.child(), LocalSourceExec.class); + assertThat(Expressions.names(source.output()), contains("count", "seen")); + } + + /** + * Checks that when the folding happens on the coordinator, the intermediate agg state + * are not used anymore. + * + * Expects + * LimitExec[10000[INTEGER]] + * \_AggregateExec[[],[COUNT(emp_no{f}#5) AS c],FINAL,8] + * \_AggregateExec[[],[COUNT(emp_no{f}#5) AS c],PARTIAL,8] + * \_LimitExec[10[INTEGER]] + * \_ExchangeExec[[],false] + * \_ProjectExec[[emp_no{r}#5]] + * \_EvalExec[[null[INTEGER] AS emp_no]] + * \_EsQueryExec[test], query[][_doc{f}#26], limit[10], sort[] estimatedRowSize[8] + */ + public void testGlobalAggFoldingOutput() { + var plan = physicalPlan(""" + from test + | limit 10 + | stats c = count(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var limit = as(optimized, LimitExec.class); + var aggFinal = as(limit.child(), AggregateExec.class); + var aggPartial = as(aggFinal.child(), AggregateExec.class); + assertThat(Expressions.names(aggPartial.output()), contains("c")); + limit = as(aggPartial.child(), LimitExec.class); + var exchange = as(limit.child(), ExchangeExec.class); + var project = as(exchange.child(), ProjectExec.class); + } + + /** + * Checks the folded aggregation preserves the intermediate output. + * + * Expects + * ProjectExec[[a{r}#5]] + * \_EvalExec[[__a_SUM@734e2841{r}#16 / __a_COUNT@12536eab{r}#17 AS a]] + * \_LimitExec[500[INTEGER]] + * \_AggregateExec[[],[SUM(emp_no{f}#6) AS __a_SUM@734e2841, COUNT(emp_no{f}#6) AS __a_COUNT@12536eab],FINAL,24] + * \_ExchangeExec[[sum{r}#18, seen{r}#19, count{r}#20, seen{r}#21],true] + * \_LocalSourceExec[[sum{r}#18, seen{r}#19, count{r}#20, seen{r}#21],[LongArrayBlock[positions=1, mvOrdering=UNORDERED, + * values=[0, + * 0]], BooleanVectorBlock[vector=ConstantBooleanVector[positions=1, value=true]], + * LongVectorBlock[vector=ConstantLongVector[positions=1, value=0]], + * BooleanVectorBlock[vector=ConstantBooleanVector[positions=1, value=true]]]] + */ + public void testPartialAggFoldingOutputForSyntheticAgg() { + var plan = physicalPlan(""" + from test + | where emp_no > 10 + | stats a = avg(emp_no) + """); + + var stats = statsForMissingField("emp_no"); + var optimized = optimizedPlan(plan, stats); + + var project = as(optimized, ProjectExec.class); + var eval = as(project.child(), EvalExec.class); + var limit = as(eval.child(), LimitExec.class); + var aggFinal = as(limit.child(), AggregateExec.class); + assertThat(aggFinal.output(), hasSize(2)); + var exchange = as(aggFinal.child(), ExchangeExec.class); + assertThat(Expressions.names(exchange.output()), contains("sum", "seen", "count", "seen")); + var source = as(exchange.child(), LocalSourceExec.class); + assertThat(Expressions.names(source.output()), contains("sum", "seen", "count", "seen")); + } + private static EsQueryExec source(PhysicalPlan plan) { if (plan instanceof ExchangeExec exchange) { plan = exchange.child(); @@ -1941,6 +2045,7 @@ private PhysicalPlan physicalPlan(String query) { var logical = logicalOptimizer.optimize(analyzer.analyze(parser.createStatement(query))); // System.out.println("Logical\n" + logical); var physical = mapper.map(logical); + // System.out.println(physical); assertSerialization(physical); return physical; } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java 
b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java index 53e2a2e412fcd..b20d166beb22e 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/parser/StatementParserTests.java @@ -26,8 +26,10 @@ import org.elasticsearch.xpack.esql.plan.logical.InlineStats; import org.elasticsearch.xpack.esql.plan.logical.MvExpand; import org.elasticsearch.xpack.esql.plan.logical.Row; +import org.elasticsearch.xpack.ql.capabilities.UnresolvedException; import org.elasticsearch.xpack.ql.expression.Alias; import org.elasticsearch.xpack.ql.expression.EmptyAttribute; +import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.NamedExpression; import org.elasticsearch.xpack.ql.expression.Order; @@ -41,6 +43,7 @@ import org.elasticsearch.xpack.ql.plan.logical.Limit; import org.elasticsearch.xpack.ql.plan.logical.LogicalPlan; import org.elasticsearch.xpack.ql.plan.logical.OrderBy; +import org.elasticsearch.xpack.ql.plan.logical.Project; import org.elasticsearch.xpack.ql.type.DataType; import org.elasticsearch.xpack.ql.type.DataTypes; import org.elasticsearch.xpack.versionfield.Version; @@ -53,6 +56,7 @@ import java.util.List; import java.util.function.Function; +import static org.elasticsearch.xpack.esql.EsqlTestUtils.as; import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.expression.function.FunctionResolutionStrategy.DEFAULT; @@ -60,6 +64,7 @@ import static org.elasticsearch.xpack.ql.type.DataTypes.KEYWORD; import static org.elasticsearch.xpack.ql.util.NumericUtils.asLongUnsigned; import static org.hamcrest.Matchers.allOf; +import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -576,13 +581,10 @@ public void testDeprecatedIsNullFunction() { public void testMetadataFieldOnOtherSources() { expectError( "row a = 1 [metadata _index]", - "1:11: mismatched input '[' expecting {<EOF>, PIPE, 'and', COMMA, 'or', '+', '-', '*', '/', '%'}" - ); - expectError("show functions [metadata _index]", "line 1:16: mismatched input '[' expecting {<EOF>, PIPE}"); - expectError( - "explain [from foo] [metadata _index]", - "line 1:20: mismatched input '[' expecting {PIPE, COMMA, OPENING_BRACKET, ']'}" + "1:11: mismatched input '[' expecting {<EOF>, '|', 'and', ',', 'or', '+', '-', '*', '/', '%'}" ); + expectError("show functions [metadata _index]", "line 1:16: token recognition error at: '['"); + expectError("explain [from foo] [metadata _index]", "line 1:20: mismatched input '[' expecting {'|', ',', OPENING_BRACKET, ']'}"); } public void testMetadataFieldMultipleDeclarations() { @@ -709,6 +711,17 @@ public void testMvExpand() { assertThat(expand.target(), equalTo(attribute("a"))); } + // see https://github.com/elastic/elasticsearch/issues/103331 + public void testKeepStarMvExpand() { + try { + String query = "from test | keep * | mv_expand a"; + var plan = statement(query); + } catch (UnresolvedException e) { + fail(e, "Regression: https://github.com/elastic/elasticsearch/issues/103331"); + } + + } + public void testUsageOfProject() { processingCommand("project a"); assertWarnings("PROJECT command is no longer supported, please use 
KEEP instead"); @@ -799,6 +812,29 @@ public void testMissingInputParams() { expectError("row x = ?, y = ?", List.of(new TypedParamValue("integer", 1)), "Not enough actual parameters 1"); } + public void testFieldContainingDotsAndNumbers() { + LogicalPlan where = processingCommand("where `a.b.1m.4321`"); + assertThat(where, instanceOf(Filter.class)); + Filter w = (Filter) where; + assertThat(w.child(), equalTo(PROCESSING_CMD_INPUT)); + assertThat(Expressions.name(w.condition()), equalTo("a.b.1m.4321")); + } + + public void testFieldQualifiedName() { + LogicalPlan where = processingCommand("where a.b.`1m`.`4321`"); + assertThat(where, instanceOf(Filter.class)); + Filter w = (Filter) where; + assertThat(w.child(), equalTo(PROCESSING_CMD_INPUT)); + assertThat(Expressions.name(w.condition()), equalTo("a.b.1m.4321")); + } + + public void testQuotedName() { + // row `my-field`=123 | stats count(`my-field`) | eval x = `count(`my-field`)` + LogicalPlan plan = processingCommand("stats count(`my-field`) | keep `count(``my-field``)`"); + var project = as(plan, Project.class); + assertThat(Expressions.names(project.projections()), contains("count(`my-field`)")); + } + private void assertIdentifierAsIndexPattern(String identifier, String statement) { LogicalPlan from = statement(statement); assertThat(from, instanceOf(EsqlUnresolvedRelation.class)); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java index b4c9d7a9baeca..1d2b11d3deb89 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/EvalMapperTests.java @@ -12,12 +12,12 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.util.MockBigArrays; import org.elasticsearch.common.util.PageCacheRecycler; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.operator.DriverContext; import org.elasticsearch.compute.operator.EvalOperator; import org.elasticsearch.indices.breaker.NoneCircuitBreakerService; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.esql.SerializationTestUtils; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.EvalMapper; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.Equals; import org.elasticsearch.xpack.esql.evaluator.predicate.operator.comparison.GreaterThan; @@ -162,7 +162,7 @@ private static FieldAttribute field(String name, DataType type) { static DriverContext driverContext() { return new DriverContext( new MockBigArrays(PageCacheRecycler.NON_RECYCLING_INSTANCE, new NoneCircuitBreakerService()).withCircuitBreaking(), - BlockFactory.getNonBreakingInstance() + TestBlockFactory.getNonBreakingInstance() ); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java index 6a5c8fd3f92c2..af7a66fea9bb2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/GrokEvaluatorExtracterTests.java @@ -9,6 +9,7 @@ import org.apache.lucene.util.BytesRef; import org.elasticsearch.compute.data.Block; +import 
org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.BooleanBlock; import org.elasticsearch.compute.data.BytesRefBlock; import org.elasticsearch.compute.data.DoubleBlock; @@ -18,6 +19,7 @@ import org.elasticsearch.grok.Grok; import org.elasticsearch.grok.GrokBuiltinPatterns; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.evaluator.command.GrokEvaluatorExtracter; import java.util.Map; @@ -26,6 +28,8 @@ import static org.hamcrest.Matchers.is; public class GrokEvaluatorExtracterTests extends ESTestCase { + final BlockFactory blockFactory = TestBlockFactory.getNonBreakingInstance(); + final Map KEY_TO_BLOCK = Map.of("a", 0, "b", 1, "c", 2, "d", 3, "e", 4, "f", 5); final Map TYPES = Map.of( "a", @@ -196,7 +200,7 @@ private void checkBooleanBlock(Block.Builder builder, int[] itemsPerRow, boolean private BytesRefBlock buildInputBlock(int[] mvSize, String... input) { int nextString = 0; - BytesRefBlock.Builder inputBuilder = BytesRefBlock.newBlockBuilder(input.length); + BytesRefBlock.Builder inputBuilder = blockFactory.newBytesRefBlockBuilder(input.length); for (int i = 0; i < mvSize.length; i++) { if (mvSize[i] == 0) { inputBuilder.appendNull(); @@ -222,12 +226,12 @@ private BytesRefBlock buildInputBlock(int[] mvSize, String... input) { private Block.Builder[] buidDefaultTargetBlocks(int estimatedSize) { return new Block.Builder[] { - BytesRefBlock.newBlockBuilder(estimatedSize), - IntBlock.newBlockBuilder(estimatedSize), - LongBlock.newBlockBuilder(estimatedSize), - DoubleBlock.newBlockBuilder(estimatedSize), - DoubleBlock.newBlockBuilder(estimatedSize), - BooleanBlock.newBlockBuilder(estimatedSize) }; + blockFactory.newBytesRefBlockBuilder(estimatedSize), + blockFactory.newIntBlockBuilder(estimatedSize), + blockFactory.newLongBlockBuilder(estimatedSize), + blockFactory.newDoubleBlockBuilder(estimatedSize), + blockFactory.newDoubleBlockBuilder(estimatedSize), + blockFactory.newBooleanBlockBuilder(estimatedSize) }; } private GrokEvaluatorExtracter buildExtracter(String pattern, Map keyToBlock, Map types) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java index 24fcae0f6bbb0..27a45e71a69c1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/LocalExecutionPlannerTests.java @@ -19,7 +19,6 @@ import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; -import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.lucene.LuceneSourceOperator; import org.elasticsearch.compute.lucene.LuceneTopNSourceOperator; import org.elasticsearch.compute.operator.SourceOperator; @@ -31,6 +30,7 @@ import org.elasticsearch.search.internal.ContextIndexSearcher; import org.elasticsearch.search.internal.SearchContext; import org.elasticsearch.test.TestSearchContext; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import org.elasticsearch.xpack.esql.plugin.QueryPragmas; @@ -124,7 +124,7 @@ private LocalExecutionPlanner planner() throws IOException { "test", null, 
BigArrays.NON_RECYCLING_INSTANCE, - BlockFactory.getNonBreakingInstance(), + TestBlockFactory.getNonBreakingInstance(), Settings.EMPTY, config(), null, diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java index 601184252814e..8377530b9fbc2 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/planner/TestPhysicalOperationProviders.java @@ -13,11 +13,10 @@ import org.elasticsearch.compute.aggregation.GroupingAggregator; import org.elasticsearch.compute.aggregation.blockhash.BlockHash; import org.elasticsearch.compute.data.Block; +import org.elasticsearch.compute.data.BlockFactory; import org.elasticsearch.compute.data.DocBlock; import org.elasticsearch.compute.data.DocVector; import org.elasticsearch.compute.data.ElementType; -import org.elasticsearch.compute.data.IntArrayVector; -import org.elasticsearch.compute.data.IntBlock; import org.elasticsearch.compute.data.IntVector; import org.elasticsearch.compute.data.Page; import org.elasticsearch.compute.operator.DriverContext; @@ -27,6 +26,7 @@ import org.elasticsearch.compute.operator.SourceOperator; import org.elasticsearch.compute.operator.SourceOperator.SourceOperatorFactory; import org.elasticsearch.xpack.esql.EsqlIllegalArgumentException; +import org.elasticsearch.xpack.esql.TestBlockFactory; import org.elasticsearch.xpack.esql.plan.physical.AggregateExec; import org.elasticsearch.xpack.esql.plan.physical.EsQueryExec; import org.elasticsearch.xpack.esql.plan.physical.FieldExtractExec; @@ -92,6 +92,11 @@ public Operator.OperatorFactory ordinalGroupingOperatorFactory( private class TestSourceOperator extends SourceOperator { boolean finished = false; + private final DriverContext driverContext; + + TestSourceOperator(DriverContext driverContext) { + this.driverContext = driverContext; + } @Override public Page getOutput() { @@ -99,15 +104,14 @@ public Page getOutput() { finish(); } - return new Page( - new Block[] { - new DocVector( - IntBlock.newConstantBlockWith(0, testData.getPositionCount()).asVector(), - IntBlock.newConstantBlockWith(0, testData.getPositionCount()).asVector(), - new IntArrayVector(IntStream.range(0, testData.getPositionCount()).toArray(), testData.getPositionCount()), - true - ).asBlock() } + BlockFactory blockFactory = driverContext.blockFactory(); + DocVector docVector = new DocVector( + blockFactory.newConstantIntVector(0, testData.getPositionCount()), + blockFactory.newConstantIntVector(0, testData.getPositionCount()), + blockFactory.newIntArrayVector(IntStream.range(0, testData.getPositionCount()).toArray(), testData.getPositionCount()), + true ); + return new Page(docVector.asBlock()); } @Override @@ -128,11 +132,9 @@ public void close() { private class TestSourceOperatorFactory implements SourceOperatorFactory { - SourceOperator op = new TestSourceOperator(); - @Override public SourceOperator get(DriverContext driverContext) { - return op; + return new TestSourceOperator(driverContext); } @Override @@ -292,7 +294,8 @@ private Block extractBlockForColumn(Page page, String columnName) { DocBlock docBlock = page.getBlock(0); IntVector docIndices = docBlock.asVector().docs(); Block originalData = testData.getBlock(columnIndex); - Block.Builder builder = 
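// Design note on the TestSourceOperatorFactory change above, restated as a sketch: a
// factory should hand every driver its own operator bound to that driver's context
// (and thus its BlockFactory) rather than sharing one pre-built instance.
@Override
public SourceOperator get(DriverContext driverContext) {
    return new TestSourceOperator(driverContext); // fresh operator per driver
}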
originalData.elementType().newBlockBuilder(docIndices.getPositionCount()); + Block.Builder builder = originalData.elementType() + .newBlockBuilder(docIndices.getPositionCount(), TestBlockFactory.getNonBreakingInstance()); for (int c = 0; c < docIndices.getPositionCount(); c++) { int doc = docIndices.getInt(c); builder.copyFrom(originalData, doc, doc + 1); diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java index 0aaf4a1a18e32..fb5135d1de54c 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/session/IndexResolverFieldNamesTests.java @@ -198,7 +198,7 @@ public void testEvalDateTruncGrouping() { | eval y = date_trunc(hire_date, 1 year) | stats count(emp_no) by y | sort y - | keep y, count(emp_no) + | keep y, `count(emp_no)` | limit 5""", Set.of("hire_date", "hire_date.*", "emp_no", "emp_no.*")); } diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java index 5f219bd8ce592..652dcefa2a605 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexIT.java @@ -232,21 +232,16 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openPointInTimeRequest).actionGet().getPointInTimeId(); try { - assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName).setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), - searchResponse -> { - assertThat(searchResponse.pointInTimeId(), equalTo(pitId)); - assertHitCount(searchResponse, numDocs); - } - ); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> { + assertThat(searchResponse.pointInTimeId(), equalTo(pitId)); + assertHitCount(searchResponse, numDocs); + }); internalCluster().restartNode(assignedNode); ensureGreen(indexName); assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName) - .setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(between(1, 10)) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)), @@ -287,7 +282,7 @@ public void testPointInTimeWithDeletedIndices() { indicesAdmin().prepareDelete("index-1").get(); // Return partial results if allow partial search result is allowed assertResponse( - prepareSearch().setPreference(null).setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setAllowPartialSearchResults(true).setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> { assertFailures(searchResponse); assertHitCount(searchResponse, index2); @@ -296,7 +291,7 @@ public void testPointInTimeWithDeletedIndices() { // Fails if allow partial search result is not allowed 
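// The simplification applied across these frozen-index tests, sketched: a point-in-time
// id already pins the target indices and shard routing, so the explicit setIndices(...)
// and setPreference(null) calls are redundant on the search request.
prepareSearch()
    .setPointInTime(new PointInTimeBuilder(pitId))
    .setAllowPartialSearchResults(true)
    .get();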
expectThrows( ElasticsearchException.class, - prepareSearch().setPreference(null).setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pitId))::get + prepareSearch().setAllowPartialSearchResults(false).setPointInTime(new PointInTimeBuilder(pitId))::get ); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet(); @@ -322,7 +317,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { .getPointInTimeId(); try { assertNoFailuresAndResponse( - prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), + prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), searchResponse -> assertHitCount(searchResponse, numDocs) ); } finally { @@ -338,7 +333,7 @@ public void testOpenPointInTimeWithNoIndexMatched() { .actionGet() .getPointInTimeId(); try { - assertHitCountAndNoFailures(prepareSearch().setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), 0); + assertHitCountAndNoFailures(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), 0); } finally { client().execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pitId)).actionGet(); } diff --git a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java index ca848c8bb8c44..5a7453903b13b 100644 --- a/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java +++ b/x-pack/plugin/frozen-indices/src/internalClusterTest/java/org/elasticsearch/index/engine/frozen/FrozenIndexTests.java @@ -150,11 +150,7 @@ public void testCloseFreezeAndOpen() throws Exception { try { for (int from = 0; from < 3; from++) { assertResponse( - client().prepareSearch() - .setIndicesOptions(IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED) - .setPointInTime(new PointInTimeBuilder(pitId)) - .setSize(1) - .setFrom(from), + client().prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)).setSize(1).setFrom(from), response -> { assertHitCount(response, 3); assertEquals(1, response.getHits().getHits().length); diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java index 3bf1afd7e394d..c9da34b209279 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/TimeSeriesLifecycleActionsIT.java @@ -77,6 +77,7 @@ import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; public class TimeSeriesLifecycleActionsIT extends ESRestTestCase { @@ -1220,10 +1221,19 @@ private void assertHistoryIsPresent( } // Finally, check that the history index is in a good state - Step.StepKey stepKey = getStepKeyForIndex(client(), DataStream.getDefaultBackingIndexName("ilm-history-5", 1)); - assertEquals("hot", stepKey.phase()); - assertEquals(RolloverAction.NAME, stepKey.action()); - assertEquals(WaitForRolloverReadyStep.NAME, stepKey.name()); + String historyIndexName = 
DataStream.getDefaultBackingIndexName("ilm-history-7", 1); + Response explainHistoryIndex = client().performRequest(new Request("GET", historyIndexName + "/_lifecycle/explain")); + Map<String, Object> responseMap; + try (InputStream is = explainHistoryIndex.getEntity().getContent()) { + responseMap = XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); + } + + @SuppressWarnings("unchecked") + Map<String, Map<String, Object>> indexResponse = ((Map<String, Map<String, Object>>) responseMap.get("indices")); + Map<String, Object> historyIndexDSLExplain = indexResponse.get(historyIndexName); + assertThat(historyIndexDSLExplain, is(notNullValue())); + assertThat(historyIndexDSLExplain.get("managed_by_lifecycle"), is(true)); + assertThat(historyIndexDSLExplain.get("index_creation_date_millis"), is(notNullValue())); } private void createSlmPolicy(String smlPolicy, String repo) throws IOException { diff --git a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java index 2e61b6e978b61..6d3811fd66d9c 100644 --- a/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java +++ b/x-pack/plugin/ilm/qa/multi-node/src/javaRestTest/java/org/elasticsearch/xpack/ilm/actions/DownsampleActionIT.java @@ -302,6 +302,7 @@ public void testRollupIndexInTheHotPhaseAfterRollover() throws Exception { }); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103439") public void testTsdbDataStreams() throws Exception { // Create the ILM policy DateHistogramInterval fixedInterval = ConfigTestHelpers.randomInterval(); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java index 6a37ae708f872..095cb212be558 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMigrateToDataTiersAction.java @@ -33,9 +33,14 @@ public List<Route> routes() { @Override protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient client) throws IOException { - MigrateToDataTiersRequest migrateRequest = request.hasContent() - ? 
MigrateToDataTiersRequest.parse(request.contentParser()) - : new MigrateToDataTiersRequest(); + MigrateToDataTiersRequest migrateRequest; + if (request.hasContent()) { + try (var parser = request.contentParser()) { + migrateRequest = MigrateToDataTiersRequest.parse(parser); + } + } else { + migrateRequest = new MigrateToDataTiersRequest(); + } migrateRequest.setDryRun(request.paramAsBoolean("dry_run", false)); return channel -> client.execute(MigrateToDataTiersAction.INSTANCE, migrateRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java index e963cade94c81..f6b28a6ed3b8a 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/action/RestMoveToStepAction.java @@ -35,8 +35,10 @@ public String getName() { @Override protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String index = restRequest.param("name"); - XContentParser parser = restRequest.contentParser(); - MoveToStepAction.Request request = MoveToStepAction.Request.parseRequest(index, parser); + MoveToStepAction.Request request; + try (XContentParser parser = restRequest.contentParser()) { + request = MoveToStepAction.Request.parseRequest(index, parser); + } request.timeout(restRequest.paramAsTime("timeout", request.timeout())); request.masterNodeTimeout(restRequest.paramAsTime("master_timeout", request.masterNodeTimeout())); return channel -> client.execute(MoveToStepAction.INSTANCE, request, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java index 6d57e75b3a4cb..5633033e6faa1 100644 --- a/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java +++ b/x-pack/plugin/ilm/src/main/java/org/elasticsearch/xpack/ilm/history/ILMHistoryTemplateRegistry.java @@ -34,10 +34,12 @@ public class ILMHistoryTemplateRegistry extends IndexTemplateRegistry { // version 3: templates moved to composable templates // version 4: add `allow_auto_create` setting // version 5: convert to data stream - public static final int INDEX_TEMPLATE_VERSION = 5; + // version 6: manage by data stream lifecycle + // version 7: version the index template name so we can upgrade existing deployments + public static final int INDEX_TEMPLATE_VERSION = 7; public static final String ILM_TEMPLATE_VERSION_VARIABLE = "xpack.ilm_history.template.version"; - public static final String ILM_TEMPLATE_NAME = "ilm-history"; + public static final String ILM_TEMPLATE_NAME = "ilm-history-" + INDEX_TEMPLATE_VERSION; public static final String ILM_POLICY_NAME = "ilm-history-ilm-policy"; diff --git a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java index 6ac3a4522fb3d..0eece33e2e581 100644 --- a/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java +++ b/x-pack/plugin/ilm/src/test/java/org/elasticsearch/xpack/ilm/history/ILMHistoryStoreTests.java @@ -53,6 +53,9 @@ import static 
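// The shared pattern behind the two REST handler fixes above, assuming a hypothetical
// ExampleRequest type: the content parser is AutoCloseable, so it is scoped with
// try-with-resources and can no longer leak on the parse path.
final ExampleRequest parsed;
if (restRequest.hasContent()) {
    try (XContentParser parser = restRequest.contentParser()) {
        parsed = ExampleRequest.parse(parser);
    }
} else {
    parsed = new ExampleRequest();
}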
org.elasticsearch.xpack.core.ilm.LifecycleSettings.LIFECYCLE_HISTORY_INDEX_ENABLED_SETTING; import static org.elasticsearch.xpack.ilm.history.ILMHistoryStore.ILM_HISTORY_DATA_STREAM; +import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.ILM_TEMPLATE_NAME; +import static org.elasticsearch.xpack.ilm.history.ILMHistoryTemplateRegistry.INDEX_TEMPLATE_VERSION; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -284,6 +287,10 @@ public void onFailure(Exception e) { } } + public void testTemplateNameIsVersioned() { + assertThat(ILM_TEMPLATE_NAME, endsWith("-" + INDEX_TEMPLATE_VERSION)); + } + /** * A client that delegates to a verifying function for action/request/listener */ diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java index e08224aaffdd5..33d71c65ed643 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/InferencePlugin.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.inference.action.TransportInferenceAction; import org.elasticsearch.xpack.inference.action.TransportInferenceUsageAction; import org.elasticsearch.xpack.inference.action.TransportPutInferenceModelAction; +import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.HttpSettings; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; @@ -114,7 +115,8 @@ public List<RestHandler> getRestHandlers( @Override public Collection<?> createComponents(PluginServices services) { var throttlerManager = new ThrottlerManager(settings, services.threadPool(), services.clusterService()); - serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings)); + var truncator = new Truncator(settings, services.clusterService()); + serviceComponents.set(new ServiceComponents(services.threadPool(), throttlerManager, settings, truncator)); httpManager.set(HttpClientManager.create(settings, services.threadPool(), services.clusterService(), throttlerManager)); @@ -211,7 +213,8 @@ public List<Setting<?>> getSettings() { HttpClientManager.getSettings(), HttpRequestSenderFactory.HttpRequestSender.getSettings(), ThrottlerManager.getSettings(), - RetrySettings.getSettingsDefinitions() + RetrySettings.getSettingsDefinitions(), + Truncator.getSettings() ).flatMap(Collection::stream).collect(Collectors.toList()); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/Truncator.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/Truncator.java new file mode 100644 index 0000000000000..2da509d0d9520 --- /dev/null +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/common/Truncator.java @@ -0,0 +1,146 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Setting; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.Nullable; + +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; +import java.util.Objects; + +/** + * Provides truncation logic for inference requests + */ +public class Truncator { + + /** + * Defines the percentage to reduce the input text for an inference request. + */ + static final Setting<Double> REDUCTION_PERCENTAGE_SETTING = Setting.doubleSetting( + "xpack.inference.truncator.reduction_percentage", + 0.5, + 0.01, + 0.99, + Setting.Property.NodeScope, + Setting.Property.Dynamic + ); + + public static List<Setting<?>> getSettings() { + return List.of(REDUCTION_PERCENTAGE_SETTING); + } + + /** + * OpenAI estimates that there are 4 characters per token + * in their documentation. + * We'll take a conservative approach and assume there's a token every 3 characters. + */ + private static final double CHARS_PER_TOKEN = 3; + + public static double countTokens(String text) { + return Math.ceil(text.length() / CHARS_PER_TOKEN); + } + + private volatile double reductionPercentage; + + public Truncator(Settings settings, ClusterService clusterService) { + this.reductionPercentage = REDUCTION_PERCENTAGE_SETTING.get(settings); + + clusterService.getClusterSettings().addSettingsUpdateConsumer(REDUCTION_PERCENTAGE_SETTING, this::setReductionPercentage); + } + + private void setReductionPercentage(double percentage) { + reductionPercentage = percentage; + } + + /** + * Truncate each entry in the list to the specified number of tokens. + * @param input list of strings + * @param tokenLimit the number of tokens to limit the text to + * @return the resulting list of text and whether it was truncated + */ + public static TruncationResult truncate(List<String> input, @Nullable Integer tokenLimit) { + if (tokenLimit == null) { + return new TruncationResult(input, new boolean[input.size()]); + } + + var maxLength = maxLength(tokenLimit); + + var truncatedText = new ArrayList<String>(input.size()); + var wasTruncated = new boolean[input.size()]; + + for (int i = 0; i < input.size(); i++) { + var text = input.get(i); + var truncateResult = truncate(text, maxLength); + truncatedText.add(truncateResult.input); + wasTruncated[i] = truncateResult.truncated; + } + + return new TruncationResult(truncatedText, wasTruncated); + } + + private static int maxLength(Integer maxTokens) { + if (maxTokens == null) { + return Integer.MAX_VALUE; + } + + return (int) Math.floor(maxTokens * CHARS_PER_TOKEN); + } + + private static TruncationEntry truncate(String text, int textLength) { + var truncatedText = text.substring(0, Math.min(text.length(), textLength)); + var truncated = truncatedText.length() < text.length(); + + return new TruncationEntry(truncatedText, truncated); + } + + /** + * Truncate each entry in the list by the percentage value specified in the {@link #REDUCTION_PERCENTAGE_SETTING} setting. 
+ * @param input list of strings + * @return the resulting list of text and whether it was truncated + */ + public TruncationResult truncate(List input) { + var truncatedText = new ArrayList(input.size()); + var wasTruncated = new boolean[input.size()]; + + for (int i = 0; i < input.size(); i++) { + var text = input.get(i); + var truncateResult = truncate(text); + truncatedText.add(truncateResult.input); + wasTruncated[i] = truncateResult.truncated; + } + + return new TruncationResult(truncatedText, wasTruncated); + } + + private TruncationEntry truncate(String text) { + var length = (int) Math.floor(text.length() * reductionPercentage); + return truncate(text, length); + } + + private record TruncationEntry(String input, boolean truncated) {} + + public record TruncationResult(List input, boolean[] truncated) { + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TruncationResult that = (TruncationResult) o; + return Objects.equals(input, that.input) && Arrays.equals(truncated, that.truncated); + } + + @Override + public int hashCode() { + return Objects.hash(input, Arrays.hashCode(truncated)); + } + } +} diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java index 2cf9168f60986..0fa6edb043611 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceAction.java @@ -12,6 +12,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.retry.ResponseHandler; import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; @@ -19,7 +20,6 @@ import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequest; -import org.elasticsearch.xpack.inference.external.request.huggingface.HuggingFaceInferenceRequestEntity; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceModel; @@ -27,6 +27,7 @@ import java.util.Objects; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.common.Truncator.truncate; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; @@ -37,6 +38,8 @@ public class HuggingFaceAction implements ExecutableAction { private final String errorMessage; private final RetryingHttpSender sender; private final ResponseHandler responseHandler; + private final Truncator truncator; + private final Integer tokenLimit; public HuggingFaceAction( Sender sender, @@ -60,15 +63,19 @@ public HuggingFaceAction( ); this.account = new 
HuggingFaceAccount(model.getUri(), model.getApiKey()); this.errorMessage = format("Failed to send Hugging Face %s request to [%s]", requestType, model.getUri().toString()); + this.truncator = Objects.requireNonNull(serviceComponents.truncator()); + this.tokenLimit = model.getTokenLimit(); } @Override public void execute(List input, ActionListener listener) { try { - HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(account, new HuggingFaceInferenceRequestEntity(input)); + var truncatedInput = truncate(input, tokenLimit); + + HuggingFaceInferenceRequest request = new HuggingFaceInferenceRequest(truncator, account, truncatedInput); ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); - sender.send(request.createRequest(), responseHandler, wrappedListener); + sender.send(request, responseHandler, wrappedListener); } catch (ElasticsearchException e) { listener.onFailure(e); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java index 71e6bf98838fc..20128f1168bb9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsAction.java @@ -11,12 +11,12 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.core.Nullable; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.action.ExecutableAction; import org.elasticsearch.xpack.inference.external.http.sender.Sender; import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.external.openai.OpenAiClient; import org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequest; -import org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequestEntity; import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsModel; @@ -25,6 +25,7 @@ import java.util.Objects; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.common.Truncator.truncate; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.createInternalServerError; import static org.elasticsearch.xpack.inference.external.action.ActionUtils.wrapFailuresInElasticsearchException; @@ -34,6 +35,7 @@ public class OpenAiEmbeddingsAction implements ExecutableAction { private final OpenAiClient client; private final OpenAiEmbeddingsModel model; private final String errorMessage; + private final Truncator truncator; public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, ServiceComponents serviceComponents) { this.model = Objects.requireNonNull(model); @@ -44,6 +46,7 @@ public OpenAiEmbeddingsAction(Sender sender, OpenAiEmbeddingsModel model, Servic ); this.client = new OpenAiClient(Objects.requireNonNull(sender), Objects.requireNonNull(serviceComponents)); this.errorMessage = getErrorMessage(this.model.getServiceSettings().uri()); + this.truncator = Objects.requireNonNull(serviceComponents.truncator()); } private static 
String getErrorMessage(@Nullable URI uri) { @@ -57,10 +60,9 @@ private static String getErrorMessage(@Nullable URI uri) { @Override public void execute(List input, ActionListener listener) { try { - OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest( - account, - new OpenAiEmbeddingsRequestEntity(input, model.getTaskSettings().model(), model.getTaskSettings().user()) - ); + var truncatedInput = truncate(input, model.getServiceSettings().maxInputTokens()); + + OpenAiEmbeddingsRequest request = new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, model.getTaskSettings()); ActionListener wrappedListener = wrapFailuresInElasticsearchException(errorMessage, listener); client.send(request, wrappedListener); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java index 49d0d768d9089..d95c690917d50 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/AlwaysRetryingResponseHandler.java @@ -12,6 +12,7 @@ import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.IOException; @@ -50,7 +51,7 @@ public String getRequestType() { } @Override - public InferenceServiceResults parseResult(HttpResult result) throws RetryException { + public InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException { try { return parseFunction.apply(result); } catch (Exception e) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java index 4e36d6a59a5e6..e40f4efad348c 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/BaseResponseHandler.java @@ -9,12 +9,11 @@ import org.apache.http.client.methods.HttpRequestBase; import org.elasticsearch.ElasticsearchStatusException; -import org.elasticsearch.core.CheckedFunction; import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; -import java.io.IOException; import java.util.Objects; import java.util.function.Function; @@ -26,26 +25,23 @@ public abstract class BaseResponseHandler implements ResponseHandler { public static final String RATE_LIMIT = "Received a rate limit status code"; public static final String AUTHENTICATION = "Received an authentication error status code"; public static final String REDIRECTION = "Unhandled redirection"; + public static final String CONTENT_TOO_LARGE = "Received a content too large status code"; public static final String UNSUCCESSFUL = "Received an unsuccessful status code"; protected final String 
requestType;
-    private final CheckedFunction<HttpResult, InferenceServiceResults, IOException> parseFunction;
+    private final ResponseParser parseFunction;
     private final Function<HttpResult, ErrorMessage> errorParseFunction;
 
-    public BaseResponseHandler(
-        String requestType,
-        CheckedFunction<HttpResult, InferenceServiceResults, IOException> parseFunction,
-        Function<HttpResult, ErrorMessage> errorParseFunction
-    ) {
+    public BaseResponseHandler(String requestType, ResponseParser parseFunction, Function<HttpResult, ErrorMessage> errorParseFunction) {
         this.requestType = Objects.requireNonNull(requestType);
         this.parseFunction = Objects.requireNonNull(parseFunction);
         this.errorParseFunction = Objects.requireNonNull(errorParseFunction);
     }
 
     @Override
-    public InferenceServiceResults parseResult(HttpResult result) throws RetryException {
+    public InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException {
         try {
-            return parseFunction.apply(result);
+            return parseFunction.apply(request, result);
         } catch (Exception e) {
             throw new RetryException(true, e);
         }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ContentTooLargeException.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ContentTooLargeException.java
new file mode 100644
index 0000000000000..15fb8dedc133a
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ContentTooLargeException.java
@@ -0,0 +1,25 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.http.retry;
+
+import org.elasticsearch.xpack.inference.external.request.Request;
+
+/**
+ * Provides an exception for truncating the request input.
+ */
+public class ContentTooLargeException extends RetryException {
+
+    public ContentTooLargeException(Throwable cause) {
+        super(true, cause);
+    }
+
+    @Override
+    public Request rebuildRequest(Request original) {
+        return original.truncate();
+    }
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java
index 93921295bc677..cb05af18e6a09 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseHandler.java
@@ -11,6 +11,7 @@
 import org.apache.logging.log4j.Logger;
 import org.elasticsearch.inference.InferenceServiceResults;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.request.Request;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 
 /**
@@ -34,11 +35,12 @@ void validateResponse(ThrottlerManager throttlerManager, Logger logger, HttpRequ
 
     /**
      * A method for parsing the response from the server.
-     * @param result The wrapped response from the server.
+     * @param request The original request sent to the server
+     * @param result The wrapped response from the server
      * @return the parsed inference results
      * @throws RetryException if a parsing error occurs
      */
-    InferenceServiceResults parseResult(HttpResult result) throws RetryException;
+    InferenceServiceResults parseResult(Request request, HttpResult result) throws RetryException;
 
     /**
      * A string to uniquely identify the type of request that is being handled. This allows loggers to clarify which type of request
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseParser.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseParser.java
new file mode 100644
index 0000000000000..be6a4e9dbb480
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/ResponseParser.java
@@ -0,0 +1,19 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.http.retry;
+
+import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.request.Request;
+
+import java.io.IOException;
+
+@FunctionalInterface
+public interface ResponseParser {
+    InferenceServiceResults apply(Request request, HttpResult result) throws IOException;
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java
index 4f97b6cc4ae47..2e2ba03345a3b 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retrier.java
@@ -7,10 +7,10 @@
 
 package org.elasticsearch.xpack.inference.external.http.retry;
 
-import org.apache.http.client.methods.HttpRequestBase;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.inference.InferenceServiceResults;
+import org.elasticsearch.xpack.inference.external.request.Request;
 
 public interface Retrier {
-    void send(HttpRequestBase request, ResponseHandler responseHandler, ActionListener<InferenceServiceResults> listener);
+    void send(Request request, ResponseHandler responseHandler, ActionListener<InferenceServiceResults> listener);
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java
index b4598717e7fc8..9b04903457008 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryException.java
@@ -9,8 +9,9 @@
 
 import org.elasticsearch.ElasticsearchException;
 import org.elasticsearch.ElasticsearchWrapperException;
+import org.elasticsearch.xpack.inference.external.request.Request;
 
-public class RetryException extends ElasticsearchException implements ElasticsearchWrapperException {
+public class RetryException extends ElasticsearchException implements ElasticsearchWrapperException, Retryable {
     private final boolean shouldRetry;
 
     public RetryException(boolean shouldRetry, Throwable cause) {
@@ -32,6 +33,12 @@ public RetryException(boolean shouldRetry, String msg, Throwable cause) {
         this.shouldRetry = shouldRetry;
     }
 
+    @Override
+    public Request rebuildRequest(Request original) {
+        return original;
+    }
+
+    @Override
     public boolean shouldRetry() {
         return shouldRetry;
     }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retryable.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retryable.java
new file mode 100644
index 0000000000000..63ddd13e08a82
--- /dev/null
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/Retryable.java
@@ -0,0 +1,20 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+package org.elasticsearch.xpack.inference.external.http.retry;
+
+import org.elasticsearch.xpack.inference.external.request.Request;
+
+/**
+ * Provides an interface for determining if an error should be retried and a way to modify
+ * the request based on the type of failure that occurred.
+ */
+public interface Retryable {
+    Request rebuildRequest(Request original);
+
+    boolean shouldRetry();
+}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java
index 70f2a9e0dde16..123b921cc7d30 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSender.java
@@ -18,6 +18,7 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
 import org.elasticsearch.xpack.inference.external.http.sender.Sender;
+import org.elasticsearch.xpack.inference.external.request.Request;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 
 import java.io.IOException;
@@ -64,10 +65,10 @@ public RetryingHttpSender(
     }
 
     private class InternalRetrier extends RetryableAction<InferenceServiceResults> {
-        private final HttpRequestBase request;
+        private Request request;
         private final ResponseHandler responseHandler;
 
-        InternalRetrier(HttpRequestBase request, ResponseHandler responseHandler, ActionListener<InferenceServiceResults> listener) {
+        InternalRetrier(Request request, ResponseHandler responseHandler, ActionListener<InferenceServiceResults> listener) {
             super(
                 logger,
                 threadPool,
@@ -83,27 +84,30 @@ private class InternalRetrier extends RetryableAction<InferenceServiceResults> {
 
         @Override
         public void tryAction(ActionListener<InferenceServiceResults> listener) {
+            var httpRequest = request.createRequest();
+
             ActionListener<HttpResult> responseListener = ActionListener.wrap(result -> {
                 try {
-                    responseHandler.validateResponse(throttlerManager, logger, request, result);
-                    InferenceServiceResults inferenceResults = responseHandler.parseResult(result);
+                    responseHandler.validateResponse(throttlerManager, logger, httpRequest, result);
+                    InferenceServiceResults inferenceResults = responseHandler.parseResult(request, result);
                     listener.onResponse(inferenceResults);
                 } catch (Exception e) {
-                    logException(request, result,
responseHandler.getRequestType(), e); + logException(httpRequest, result, responseHandler.getRequestType(), e); listener.onFailure(e); } }, e -> { - logException(request, responseHandler.getRequestType(), e); + logException(httpRequest, responseHandler.getRequestType(), e); listener.onFailure(transformIfRetryable(e)); }); - sender.send(request, responseListener); + sender.send(httpRequest, responseListener); } @Override public boolean shouldRetry(Exception e) { - if (e instanceof RetryException retry) { + if (e instanceof Retryable retry) { + request = retry.rebuildRequest(request); return retry.shouldRetry(); } @@ -137,7 +141,7 @@ private Exception transformIfRetryable(Exception e) { } @Override - public void send(HttpRequestBase request, ResponseHandler responseHandler, ActionListener listener) { + public void send(Request request, ResponseHandler responseHandler, ActionListener listener) { InternalRetrier retrier = new InternalRetrier(request, responseHandler, listener); retrier.run(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java index eb7bc3d6a0b28..64e5460bcce15 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandler.java @@ -9,21 +9,19 @@ import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.response.huggingface.HuggingFaceErrorResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import java.io.IOException; - import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; public class HuggingFaceResponseHandler extends BaseResponseHandler { - public HuggingFaceResponseHandler(String requestType, CheckedFunction parseFunction) { + public HuggingFaceResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, HuggingFaceErrorResponseEntity::fromResponse); } @@ -52,6 +50,8 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw throw new RetryException(true, buildError(RATE_LIMIT, request, result)); } else if (statusCode >= 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); + } else if (statusCode == 413) { + throw new ContentTooLargeException(buildError(CONTENT_TOO_LARGE, request, result)); } else if (statusCode == 401) { throw new RetryException(false, buildError(AUTHENTICATION, request, result)); } else if (statusCode >= 300 && statusCode < 400) { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java index e31bc3b2fd41e..cb82616587091 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClient.java @@ -38,7 +38,7 @@ public OpenAiClient(Sender sender, ServiceComponents serviceComponents) { } public void send(OpenAiEmbeddingsRequest request, ActionListener listener) throws IOException { - sender.send(request.createRequest(), EMBEDDINGS_HANDLER, listener); + sender.send(request, EMBEDDINGS_HANDLER, listener); } private static ResponseHandler createEmbeddingsHandler() { diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java index 43c234a6809c4..207e3c2bbd035 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandler.java @@ -11,16 +11,14 @@ import org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; import org.elasticsearch.common.Strings; -import org.elasticsearch.core.CheckedFunction; -import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.BaseResponseHandler; +import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; +import org.elasticsearch.xpack.inference.external.http.retry.ResponseParser; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import org.elasticsearch.xpack.inference.external.response.openai.OpenAiErrorResponseEntity; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; -import java.io.IOException; - import static org.elasticsearch.xpack.inference.external.http.HttpUtils.checkForEmptyBody; public class OpenAiResponseHandler extends BaseResponseHandler { @@ -36,7 +34,9 @@ public class OpenAiResponseHandler extends BaseResponseHandler { // The remaining number of tokens that are permitted before exhausting the rate limit. 
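    // Illustrative note, an assumption rather than code from this change: buildRateLimitErrorMessage(result)
    // below folds these x-ratelimit-* headers into the retry error, producing a message along the lines of
    //   "Received a rate limit status code. Token limit [10000], remaining tokens [2000]. Request limit [500], remaining requests [100]"
    // where any header absent from the response is reported as unknown by getFirstHeaderOrUnknown.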
static final String REMAINING_TOKENS = "x-ratelimit-remaining-tokens"; - public OpenAiResponseHandler(String requestType, CheckedFunction parseFunction) { + static final String CONTENT_TOO_LARGE_MESSAGE = "Please reduce your prompt; or completion length."; + + public OpenAiResponseHandler(String requestType, ResponseParser parseFunction) { super(requestType, parseFunction, OpenAiErrorResponseEntity::fromResponse); } @@ -65,7 +65,9 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw if (statusCode >= 500) { throw new RetryException(false, buildError(SERVER_ERROR, request, result)); } else if (statusCode == 429) { - throw new RetryException(true, buildError(buildRateLimitErrorMessage(request, result), request, result)); + throw new RetryException(true, buildError(buildRateLimitErrorMessage(result), request, result)); + } else if (isContentTooLarge(result)) { + throw new ContentTooLargeException(buildError(CONTENT_TOO_LARGE, request, result)); } else if (statusCode == 401) { throw new RetryException(false, buildError(AUTHENTICATION, request, result)); } else if (statusCode >= 300 && statusCode < 400) { @@ -75,9 +77,24 @@ void checkForFailureStatusCode(HttpRequestBase request, HttpResult result) throw } } - static String buildRateLimitErrorMessage(HttpRequestBase request, HttpResult result) { - var response = result.response(); + private static boolean isContentTooLarge(HttpResult result) { int statusCode = result.response().getStatusLine().getStatusCode(); + + if (statusCode == 413) { + return true; + } + + if (statusCode == 400) { + var errorEntity = OpenAiErrorResponseEntity.fromResponse(result); + + return errorEntity != null && errorEntity.getErrorMessage().contains(CONTENT_TOO_LARGE_MESSAGE); + } + + return false; + } + + static String buildRateLimitErrorMessage(HttpResult result) { + var response = result.response(); var tokenLimit = getFirstHeaderOrUnknown(response, TOKENS_LIMIT); var remainingTokens = getFirstHeaderOrUnknown(response, REMAINING_TOKENS); var requestLimit = getFirstHeaderOrUnknown(response, REQUESTS_LIMIT); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java index 91ebfe0e3478e..a0b31f7b9dbe9 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/Request.java @@ -9,6 +9,22 @@ import org.apache.http.client.methods.HttpRequestBase; +import java.net.URI; + public interface Request { HttpRequestBase createRequest(); + + URI getURI(); + + /** + * Create a new request with less input text. + * @return a new {@link Request} with the truncated input text + */ + Request truncate(); + + /** + * Returns an array of booleans indicating if the text input at that same array index was truncated in the request + * sent to the 3rd party server. 
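+     * For example (illustrative): a request built from two inputs where only the first was
+     * shortened would return {@code [true, false]}.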
+ */ + boolean[] getTruncationInfo(); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java index 8b37439fc6c8a..9f86257ba4911 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequest.java @@ -13,9 +13,11 @@ import org.apache.http.entity.ByteArrayEntity; import org.elasticsearch.common.Strings; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.Truncator; import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; import org.elasticsearch.xpack.inference.external.request.Request; +import java.net.URI; import java.nio.charset.StandardCharsets; import java.util.Objects; @@ -23,22 +25,42 @@ public class HuggingFaceInferenceRequest implements Request { + private final Truncator truncator; private final HuggingFaceAccount account; - private final HuggingFaceInferenceRequestEntity entity; + private final Truncator.TruncationResult truncationResult; - public HuggingFaceInferenceRequest(HuggingFaceAccount account, HuggingFaceInferenceRequestEntity entity) { + public HuggingFaceInferenceRequest(Truncator truncator, HuggingFaceAccount account, Truncator.TruncationResult input) { + this.truncator = Objects.requireNonNull(truncator); this.account = Objects.requireNonNull(account); - this.entity = Objects.requireNonNull(entity); + this.truncationResult = Objects.requireNonNull(input); } public HttpRequestBase createRequest() { HttpPost httpPost = new HttpPost(account.url()); - ByteArrayEntity byteEntity = new ByteArrayEntity(Strings.toString(entity).getBytes(StandardCharsets.UTF_8)); + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString(new HuggingFaceInferenceRequestEntity(truncationResult.input())).getBytes(StandardCharsets.UTF_8) + ); httpPost.setEntity(byteEntity); httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaTypeWithoutParameters()); httpPost.setHeader(createAuthBearerHeader(account.apiKey())); return httpPost; } + + public URI getURI() { + return account.url(); + } + + @Override + public Request truncate() { + var truncateResult = truncator.truncate(truncationResult.input()); + + return new HuggingFaceInferenceRequest(truncator, account, truncateResult); + } + + @Override + public boolean[] getTruncationInfo() { + return truncationResult.truncated().clone(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java index d195563227d65..3a9fab44aa04e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequest.java @@ -16,8 +16,10 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.Truncator; import 
org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; import org.elasticsearch.xpack.inference.external.request.Request; +import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings; import java.net.URI; import java.net.URISyntaxException; @@ -29,35 +31,68 @@ public class OpenAiEmbeddingsRequest implements Request { + private final Truncator truncator; private final OpenAiAccount account; - private final OpenAiEmbeddingsRequestEntity entity; + private final Truncator.TruncationResult truncationResult; + private final URI uri; + private final OpenAiEmbeddingsTaskSettings taskSettings; - public OpenAiEmbeddingsRequest(OpenAiAccount account, OpenAiEmbeddingsRequestEntity entity) { + public OpenAiEmbeddingsRequest( + Truncator truncator, + OpenAiAccount account, + Truncator.TruncationResult input, + OpenAiEmbeddingsTaskSettings taskSettings + ) { + this.truncator = Objects.requireNonNull(truncator); this.account = Objects.requireNonNull(account); - this.entity = Objects.requireNonNull(entity); + this.truncationResult = Objects.requireNonNull(input); + this.uri = buildUri(this.account.url()); + this.taskSettings = Objects.requireNonNull(taskSettings); } - public HttpRequestBase createRequest() { + private static URI buildUri(URI accountUri) { try { - URI uriForRequest = account.url() == null ? buildDefaultUri() : account.url(); - - HttpPost httpPost = new HttpPost(uriForRequest); + return accountUri == null ? buildDefaultUri() : accountUri; + } catch (URISyntaxException e) { + throw new ElasticsearchStatusException("Failed to construct OpenAI URL", RestStatus.INTERNAL_SERVER_ERROR, e); + } + } - ByteArrayEntity byteEntity = new ByteArrayEntity(Strings.toString(entity).getBytes(StandardCharsets.UTF_8)); - httpPost.setEntity(byteEntity); + public HttpRequestBase createRequest() { + HttpPost httpPost = new HttpPost(uri); - httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); - httpPost.setHeader(createAuthBearerHeader(account.apiKey())); + ByteArrayEntity byteEntity = new ByteArrayEntity( + Strings.toString(new OpenAiEmbeddingsRequestEntity(truncationResult.input(), taskSettings.model(), taskSettings.user())) + .getBytes(StandardCharsets.UTF_8) + ); + httpPost.setEntity(byteEntity); - var org = account.organizationId(); - if (org != null) { - httpPost.setHeader(createOrgHeader(org)); - } + httpPost.setHeader(HttpHeaders.CONTENT_TYPE, XContentType.JSON.mediaType()); + httpPost.setHeader(createAuthBearerHeader(account.apiKey())); - return httpPost; - } catch (URISyntaxException e) { - throw new ElasticsearchStatusException("Failed to construct OpenAI URL", RestStatus.INTERNAL_SERVER_ERROR, e); + var org = account.organizationId(); + if (org != null) { + httpPost.setHeader(createOrgHeader(org)); } + + return httpPost; + } + + @Override + public URI getURI() { + return uri; + } + + @Override + public Request truncate() { + var truncatedInput = truncator.truncate(truncationResult.input()); + + return new OpenAiEmbeddingsRequest(truncator, account, truncatedInput, taskSettings); + } + + @Override + public boolean[] getTruncationInfo() { + return truncationResult.truncated().clone(); } // default for testing diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java index 7ef0d1cdbf3c7..247537b9958d0 
100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntity.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.request.Request;
 
 import java.io.IOException;
 import java.util.ArrayList;
@@ -56,15 +57,16 @@ public class HuggingFaceElserResponseEntity {
      *
      *
      */
-    public static SparseEmbeddingResults fromResponse(HttpResult response) throws IOException {
+    public static SparseEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException {
         var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
 
         try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
             moveToFirstToken(jsonParser);
 
+            var truncationResults = request.getTruncationInfo();
             List<SparseEmbeddingResults.Embedding> parsedEmbeddings = XContentParserUtils.parseList(
                 jsonParser,
-                HuggingFaceElserResponseEntity::parseExpansionResult
+                (parser, index) -> HuggingFaceElserResponseEntity.parseExpansionResult(truncationResults, parser, index)
             );
 
             if (parsedEmbeddings.isEmpty()) {
@@ -75,7 +77,8 @@ public static SparseEmbeddingResults fromResponse(HttpResult response) throws IO
         }
     }
 
-    private static SparseEmbeddingResults.Embedding parseExpansionResult(XContentParser parser) throws IOException {
+    private static SparseEmbeddingResults.Embedding parseExpansionResult(boolean[] truncationResults, XContentParser parser, int index)
+        throws IOException {
         XContentParser.Token token = parser.currentToken();
         XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser);
 
@@ -89,9 +92,9 @@ private static SparseEmbeddingResults.Embedding parseExpansionResult(XContentPar
             weightedTokens.add(new SparseEmbeddingResults.WeightedToken(parser.currentName(), parser.floatValue()));
         }
 
-        // TODO how do we know if the tokens were truncated so we can set this appropriately?
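            // Note (illustrative): the truncation flags used below come from the request itself via
            // Request#getTruncationInfo, where entry i corresponds to input i; e.g. inputs
            // ["some very long passage", "ok"] under a small token limit would yield [true, false].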
-        // This will depend on whether we handle the tokenization or hugging face
-        return new SparseEmbeddingResults.Embedding(weightedTokens, false);
+        // prevent an out-of-bounds access if for some reason the truncation list is smaller than the results
+        var isTruncated = truncationResults != null && index < truncationResults.length && truncationResults[index];
+        return new SparseEmbeddingResults.Embedding(weightedTokens, isTruncated);
     }
 
     private HuggingFaceElserResponseEntity() {}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java
index fb7cbf5d49768..b74b03891034f 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntity.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.request.Request;
 
 import java.io.IOException;
 import java.util.List;
@@ -30,7 +31,7 @@ public class HuggingFaceEmbeddingsResponseEntity {
      * Parse the response from Hugging Face. The known formats are an array of arrays and an object with an {@code embeddings} field containing
      * an array of arrays.
      */
-    public static TextEmbeddingResults fromResponse(HttpResult response) throws IOException {
+    public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException {
         var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
 
         try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java
index b723cb0f86dea..4926ba3f0ef6b 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntity.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.xcontent.XContentType;
 import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults;
 import org.elasticsearch.xpack.inference.external.http.HttpResult;
+import org.elasticsearch.xpack.inference.external.request.Request;
 
 import java.io.IOException;
 import java.util.List;
@@ -68,7 +69,7 @@ public class OpenAiEmbeddingsResponseEntity {
      *
      *
      */
-    public static TextEmbeddingResults fromResponse(HttpResult response) throws IOException {
+    public static TextEmbeddingResults fromResponse(Request request, HttpResult response) throws IOException {
         var parserConfig = XContentParserConfiguration.EMPTY.withDeprecationHandler(LoggingDeprecationHandler.INSTANCE);
 
         try (XContentParser jsonParser = XContentFactory.xContent(XContentType.JSON).createParser(parserConfig, response.body())) {
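Taken together, the pieces above wire truncation into the retry loop: a 413 (or OpenAI's content-too-large 400) surfaces as a ContentTooLargeException, whose rebuildRequest() re-truncates the inputs by the reduction percentage before the next attempt. The following is a minimal synchronous sketch of that interaction, for illustration only; the real flow is asynchronous via RetryableAction inside RetryingHttpSender, and httpClient here stands in for the real sender:

    Request request = initialRequest;
    while (true) {
        var httpRequest = request.createRequest();
        try {
            HttpResult result = httpClient.execute(httpRequest);  // current attempt
            responseHandler.validateResponse(throttlerManager, logger, httpRequest, result);
            return responseHandler.parseResult(request, result);  // parser can read the truncation flags
        } catch (Exception e) {
            if (e instanceof Retryable retryable && retryable.shouldRetry()) {
                // ContentTooLargeException overrides rebuildRequest to return a copy whose inputs
                // were re-truncated; a plain RetryException returns the request unchanged.
                request = retryable.rebuildRequest(request);
                continue;
            }
            throw e;
        }
    }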
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java
index beecf75da38ab..0286390a8a3ec 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/rest/RestInferenceAction.java
@@ -33,8 +33,9 @@ public List<Route> routes() {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         String taskType = restRequest.param("task_type");
         String modelId = restRequest.param("model_id");
-        var request = InferenceAction.Request.parseRequest(modelId, taskType, restRequest.contentParser());
-
-        return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        try (var parser = restRequest.contentParser()) {
+            var request = InferenceAction.Request.parseRequest(modelId, taskType, parser);
+            return channel -> client.execute(InferenceAction.INSTANCE, request, new RestToXContentListener<>(channel));
+        }
     }
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java
index bff1ce70b6e13..1fc3c75517875 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/ServiceComponents.java
@@ -9,9 +9,10 @@
 
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.inference.common.Truncator;
 import org.elasticsearch.xpack.inference.logging.ThrottlerManager;
 
 /**
  * A container for common components needed at various levels of the inference services to instantiate their internals
  */
-public record ServiceComponents(ThreadPool threadPool, ThrottlerManager throttlerManager, Settings settings) {}
+public record ServiceComponents(ThreadPool threadPool, ThrottlerManager throttlerManager, Settings settings, Truncator truncator) {}
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java
index d672afa99ea9d..82076c865fee8 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java
+++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceModel.java
@@ -26,4 +26,6 @@ public HuggingFaceModel(ModelConfigurations configurations, ModelSecrets secrets
     public abstract URI getUri();
 
     public abstract SecureString getApiKey();
+
+    public abstract Integer getTokenLimit();
 }
diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java
index 2f2780dfd77fb..f1f177bb6bac3 100644
--- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java
+++ 
b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceService.java @@ -64,7 +64,7 @@ private static HuggingFaceEmbeddingsModel updateModelWithEmbeddingDetails(Huggin model.getServiceSettings().uri(), null, // Similarity measure is unknown embeddingSize, - null // max input tokens is unknown + model.getTokenLimit() ); return new HuggingFaceEmbeddingsModel(model, serviceSettings); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java index 24160387179ff..4d784f96b0205 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserModel.java @@ -70,4 +70,9 @@ public URI getUri() { public SecureString getApiKey() { return getSecretSettings().apiKey(); } + + @Override + public Integer getTokenLimit() { + return getServiceSettings().maxInputTokens(); + } } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java index 777c55b56ff3a..03bc6290f5dc2 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/elser/HuggingFaceElserServiceSettings.java @@ -20,12 +20,14 @@ import java.util.Map; import java.util.Objects; -import static org.elasticsearch.xpack.inference.services.ServiceFields.URL; +import static org.elasticsearch.xpack.inference.services.ServiceFields.MAX_INPUT_TOKENS; import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings.extractUri; -public record HuggingFaceElserServiceSettings(URI uri) implements ServiceSettings { +public record HuggingFaceElserServiceSettings(URI uri, Integer maxInputTokens) implements ServiceSettings { + public static final String NAME = "hugging_face_elser_service_settings"; + private static final Integer ELSER_TOKEN_LIMIT = 512; static final String URL = "url"; @@ -35,7 +37,7 @@ public static HuggingFaceElserServiceSettings fromMap(Map map) { if (validationException.validationErrors().isEmpty() == false) { throw validationException; } - return new HuggingFaceElserServiceSettings(uri); + return new HuggingFaceElserServiceSettings(uri, ELSER_TOKEN_LIMIT); } public HuggingFaceElserServiceSettings { @@ -43,7 +45,7 @@ public static HuggingFaceElserServiceSettings fromMap(Map map) { } public HuggingFaceElserServiceSettings(String url) { - this(createUri(url)); + this(createUri(url), ELSER_TOKEN_LIMIT); } public HuggingFaceElserServiceSettings(StreamInput in) throws IOException { @@ -54,6 +56,7 @@ public HuggingFaceElserServiceSettings(StreamInput in) throws IOException { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); builder.field(URL, uri.toString()); + builder.field(MAX_INPUT_TOKENS, maxInputTokens); 
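+        // Illustrative serialized form, assuming the hard-coded ELSER limit of 512 and a field
+        // name of "max_input_tokens" from ServiceFields.MAX_INPUT_TOKENS:
+        //   {"url":"https://...","max_input_tokens":512}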
builder.endObject(); return builder; diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java index ad9f09529de40..36d469fd05056 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModel.java @@ -67,6 +67,11 @@ public SecureString getApiKey() { return getSecretSettings().apiKey(); } + @Override + public Integer getTokenLimit() { + return getServiceSettings().maxInputTokens(); + } + @Override public ExecutableAction accept(HuggingFaceActionVisitor creator) { return creator.create(this); diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java index 85c2a99d530a1..1bdd1abce0b45 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/openai/OpenAiService.java @@ -162,7 +162,7 @@ private OpenAiEmbeddingsModel updateModelWithEmbeddingDetails(OpenAiEmbeddingsMo model.getServiceSettings().organizationId(), SimilarityMeasure.DOT_PRODUCT, embeddingSize, - null + model.getServiceSettings().maxInputTokens() ); return new OpenAiEmbeddingsModel(model, serviceSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java new file mode 100644 index 0000000000000..356caecf8fadb --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/Utils.java @@ -0,0 +1,63 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference; + +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; +import org.elasticsearch.threadpool.ScalingExecutorBuilder; +import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.external.http.HttpClientManager; +import org.elasticsearch.xpack.inference.external.http.HttpSettings; +import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; +import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; +import org.elasticsearch.xpack.inference.logging.ThrottlerManager; + +import java.util.Collection; +import java.util.stream.Collectors; +import java.util.stream.Stream; + +import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; +import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; + +public class Utils { + public static ClusterService mockClusterServiceEmpty() { + return mockClusterService(Settings.EMPTY); + } + + public static ClusterService mockClusterService(Settings settings) { + var clusterService = mock(ClusterService.class); + + var registeredSettings = Stream.of( + HttpSettings.getSettings(), + HttpClientManager.getSettings(), + HttpRequestSenderFactory.HttpRequestSender.getSettings(), + ThrottlerManager.getSettings(), + RetrySettings.getSettingsDefinitions(), + Truncator.getSettings() + ).flatMap(Collection::stream).collect(Collectors.toSet()); + + var cSettings = new ClusterSettings(settings, registeredSettings); + when(clusterService.getClusterSettings()).thenReturn(cSettings); + + return clusterService; + } + + public static ScalingExecutorBuilder inferenceUtilityPool() { + return new ScalingExecutorBuilder( + UTILITY_THREAD_POOL_NAME, + 1, + 4, + TimeValue.timeValueMinutes(10), + false, + "xpack.inference.utility_thread_pool" + ); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/TruncatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/TruncatorTests.java new file mode 100644 index 0000000000000..898c965941413 --- /dev/null +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/common/TruncatorTests.java @@ -0,0 +1,106 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.inference.common; + +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.test.ESTestCase; + +import java.util.List; + +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.common.Truncator.truncate; +import static org.hamcrest.Matchers.is; + +public class TruncatorTests extends ESTestCase { + + public void testTruncate_Percentage_ReducesLengthByHalf() { + var truncator = createTruncator(); + assertThat( + truncator.truncate(List.of("123456", "awesome")), + is(new Truncator.TruncationResult(List.of("123", "awe"), new boolean[] { true, true })) + ); + } + + public void testTruncate_Percentage_OnlyTruncatesTheFirstEntry() { + var truncator = createTruncator(); + assertThat( + truncator.truncate(List.of("123456", "")), + is(new Truncator.TruncationResult(List.of("123", ""), new boolean[] { true, false })) + ); + } + + public void testTruncate_Percentage_ReducesLengthToZero() { + var truncator = createTruncator(); + assertThat(truncator.truncate(List.of("1")), is(new Truncator.TruncationResult(List.of(""), new boolean[] { true }))); + } + + public void testTruncate_Percentage_ReturnsAnEmptyString_WhenItIsAnEmptyString() { + var truncator = createTruncator(); + assertThat(truncator.truncate(List.of("")), is(new Truncator.TruncationResult(List.of(""), new boolean[] { false }))); + } + + public void testTruncate_Percentage_ReturnsAnEmptyString_WhenPercentageIs0_01() { + var truncator = createTruncator(0.01); + assertThat(truncator.truncate(List.of("abc")), is(new Truncator.TruncationResult(List.of(""), new boolean[] { true }))); + } + + public void testTruncate_Percentage_ReturnsStringWithTwoCharacters_IfPercentageIs0_99() { + var truncator = createTruncator(0.99); + assertThat(truncator.truncate(List.of("abc")), is(new Truncator.TruncationResult(List.of("ab"), new boolean[] { true }))); + } + + public void testTruncate_Tokens_DoesNotTruncateWhenLimitIsNull() { + assertThat( + truncate(List.of("abcd", "123"), null), + is(new Truncator.TruncationResult(List.of("abcd", "123"), new boolean[] { false, false })) + ); + } + + public void testTruncate_Tokens_ReducesLengthTo3Characters() { + assertThat( + truncate(List.of("abcd", "123 abcd"), 1), + is(new Truncator.TruncationResult(List.of("abc", "123"), new boolean[] { true, true })) + ); + } + + public void testTruncate_Tokens_OnlyTruncatesTheFirstEntry() { + assertThat( + truncate(List.of("abcd", "123"), 1), + is(new Truncator.TruncationResult(List.of("abc", "123"), new boolean[] { true, false })) + ); + } + + public void testTruncate_Tokens_ReturnsAnEmptyString_WhenItIsAnEmptyString() { + assertThat(truncate(List.of(""), 1), is(new Truncator.TruncationResult(List.of(""), new boolean[] { false }))); + } + + public void testTruncate_Tokens_ReturnsAnEmptyString_WhenMaxTokensIs0() { + assertThat(truncate(List.of("abc"), 0), is(new Truncator.TruncationResult(List.of(""), new boolean[] { true }))); + } + + public void testTruncate_Tokens_ReturnsTheSameValueStringIfTokensIsGreaterThanStringSize() { + assertThat(truncate(List.of("abc"), 2), is(new Truncator.TruncationResult(List.of("abc"), new boolean[] { false }))); + } + + public void testTruncate_ThrowsIfPercentageIsGreaterThan0_99() { + expectThrows(IllegalArgumentException.class, () -> createTruncator(0.991)); + } + + public void testTruncate_ThrowsIfPercentageIsLessThan0_01() { + expectThrows(IllegalArgumentException.class, () -> createTruncator(0.0099)); + } + + 
public static Truncator createTruncator() { + return new Truncator(Settings.EMPTY, mockClusterServiceEmpty()); + } + + public static Truncator createTruncator(double percentage) { + var settings = Settings.builder().put(Truncator.REDUCTION_PERCENTAGE_SETTING.getKey(), percentage).build(); + return new Truncator(settings, mockClusterServiceEmpty()); + } +} diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java index c66f967de508f..95b69f1231e9d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionCreatorTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; @@ -35,10 +36,10 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -150,7 +151,12 @@ public void testSend_FailsFromInvalidResponseFormat_ForElserAction() throws IOEx threadPool, mockThrottlerManager(), // timeout as zero for no retries - buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ), + TruncatorTests.createTruncator() ) ); var action = actionCreator.create(model); @@ -255,7 +261,12 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws threadPool, mockThrottlerManager(), // timeout as zero for no retries - buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ), + TruncatorTests.createTruncator() ) ); var action = actionCreator.create(model); @@ -284,4 +295,115 @@ public void testSend_FailsFromInvalidResponseFormat_ForEmbeddingsAction() throws assertThat(inputList, contains("abc")); } } + + public void 
testExecute_ReturnsSuccessfulResponse_AfterTruncating() throws IOException {
+        var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY);
+
+        try (var sender = senderFactory.createSender("test_service")) {
+            sender.start();
+
+            String responseJsonContentTooLarge = """
+                {
+                    "error": "Input validation error: `inputs` must have less than 512 tokens. Given: 571",
+                    "error_type": "Validation"
+                }
+                """;
+
+            String responseJson = """
+                {
+                    "embeddings": [
+                        [
+                            -0.0123,
+                            0.123
+                        ]
+                    ]
+                }
+                """;
+            webServer.enqueue(new MockResponse().setResponseCode(413).setBody(responseJsonContentTooLarge));
+            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
+
+            var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret");
+            var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool));
+            var action = actionCreator.create(model);
+
+            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
+            action.execute(List.of("abcd"), listener);
+
+            var result = listener.actionGet(TIMEOUT);
+
+            assertThat(result.asMap(), is(TextEmbeddingResultsTests.buildExpectation(List.of(List.of(-0.0123F, 0.123F)))));
+
+            assertThat(webServer.requests(), hasSize(2));
+            {
+                assertNull(webServer.requests().get(0).getUri().getQuery());
+                assertThat(
+                    webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE),
+                    equalTo(XContentType.JSON.mediaTypeWithoutParameters())
+                );
+                assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));
+
+                var initialRequestAsMap = entityAsMap(webServer.requests().get(0).getBody());
+                var initialInputs = initialRequestAsMap.get("inputs");
+                assertThat(initialInputs, is(List.of("abcd")));
+            }
+            {
+                assertNull(webServer.requests().get(1).getUri().getQuery());
+                assertThat(
+                    webServer.requests().get(1).getHeader(HttpHeaders.CONTENT_TYPE),
+                    equalTo(XContentType.JSON.mediaTypeWithoutParameters())
+                );
+                assertThat(webServer.requests().get(1).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret"));
+
+                var truncatedRequest = entityAsMap(webServer.requests().get(1).getBody());
+                var truncatedInputs = truncatedRequest.get("inputs");
+                assertThat(truncatedInputs, is(List.of("ab")));
+            }
+        }
+    }
+
+    public void testExecute_TruncatesInputBeforeSending() throws IOException {
+        var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY);
+
+        try (var sender = senderFactory.createSender("test_service")) {
+            sender.start();
+
+            String responseJson = """
+                {
+                    "embeddings": [
+                        [
+                            -0.0123,
+                            0.123
+                        ]
+                    ]
+                }
+                """;
+            webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson));
+
+            // truncated to 1 token = 3 characters
+            var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1);
+            var actionCreator = new HuggingFaceActionCreator(sender, createWithEmptySettings(threadPool));
+            var action = actionCreator.create(model);
+
+            PlainActionFuture<InferenceServiceResults> listener = new PlainActionFuture<>();
+            action.execute(List.of("123456"), listener);
+
+            var result = listener.actionGet(TIMEOUT);
+
+            assertThat(result.asMap(), is(TextEmbeddingResultsTests.buildExpectation(List.of(List.of(-0.0123F, 0.123F)))));
+
+            assertThat(webServer.requests(), hasSize(1));
+
+            assertNull(webServer.requests().get(0).getUri().getQuery());
+            assertThat(
+                webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE),
equalTo(XContentType.JSON.mediaTypeWithoutParameters()) + ); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + + var initialRequestAsMap = entityAsMap(webServer.requests().get(0).getBody()); + var initialInputs = initialRequestAsMap.get("inputs"); + assertThat(initialInputs, is(List.of("123"))); + + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java index 7b1301a75a1fd..40eec862150c7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/huggingface/HuggingFaceActionTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.inference.InferenceServiceResults; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.retry.AlwaysRetryingResponseHandler; import org.elasticsearch.xpack.inference.external.http.sender.Sender; @@ -28,7 +29,7 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.huggingface.elser.HuggingFaceElserModelTests.createModel; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; @@ -106,7 +107,7 @@ private HuggingFaceAction createAction(String url, Sender sender) { return new HuggingFaceAction( sender, model, - new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY), + new ServiceComponents(threadPool, mock(ThrottlerManager.class), Settings.EMPTY, TruncatorTests.createTruncator()), new AlwaysRetryingResponseHandler("test", (result) -> null), "test action" ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java index edd51d7aa6b4c..23b6f1ea2fbe3 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiActionCreatorTests.java @@ -9,6 +9,7 @@ import org.apache.http.HttpHeaders; import org.elasticsearch.action.support.PlainActionFuture; +import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; @@ -27,10 +28,10 @@ import java.util.List; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static 
org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -113,4 +114,224 @@ public void testCreate_OpenAiEmbeddingsModel() throws IOException { assertThat(requestMap.get("user"), is("overridden_user")); } } + + public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From413StatusCode() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + var contentTooLargeErrorMessage = + "This model's maximum context length is 8192 tokens, however you requested 13531 tokens (13531 in your prompt;" + + "0 for the completion). Please reduce your prompt; or completion length."; + + String responseJsonContentTooLarge = Strings.format(""" + { + "error": { + "message": "%s", + "type": "content_too_large", + "param": null, + "code": null + } + } + """, contentTooLargeErrorMessage); + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(413).setBody(responseJsonContentTooLarge)); + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getRequestTaskSettingsMap(null, "overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abcd"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectation(List.of(List.of(0.0123F, -0.0123F))))); + assertThat(webServer.requests(), hasSize(2)); + { + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(webServer.requests().get(0).getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("input"), is(List.of("abcd"))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + } + { + assertNull(webServer.requests().get(1).getUri().getQuery()); + assertThat(webServer.requests().get(1).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(1).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + 
assertThat(webServer.requests().get(1).getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(1).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("input"), is(List.of("ab"))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + } + } + } + + public void testExecute_ReturnsSuccessfulResponse_AfterTruncating_From400StatusCode() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + var contentTooLargeErrorMessage = + "This model's maximum context length is 8192 tokens, however you requested 13531 tokens (13531 in your prompt;" + + "0 for the completion). Please reduce your prompt; or completion length."; + + String responseJsonContentTooLarge = Strings.format(""" + { + "error": { + "message": "%s", + "type": "content_too_large", + "param": null, + "code": null + } + } + """, contentTooLargeErrorMessage); + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(400).setBody(responseJsonContentTooLarge)); + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = createModel(getUrl(webServer), "org", "secret", "model", "user"); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getRequestTaskSettingsMap(null, "overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("abcd"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectation(List.of(List.of(0.0123F, -0.0123F))))); + assertThat(webServer.requests(), hasSize(2)); + { + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(webServer.requests().get(0).getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("input"), is(List.of("abcd"))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + } + { + assertNull(webServer.requests().get(1).getUri().getQuery()); + assertThat(webServer.requests().get(1).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(1).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(webServer.requests().get(1).getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(1).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("input"), is(List.of("ab"))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + } + } + } 
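+
+    // As in the Hugging Face tests above, both 413 and 400 "content too large" responses
+    // trigger a single truncate-and-retry; the test below covers the complementary case
+    // where a model-configured token limit truncates the input before the first request.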
+ + public void testExecute_TruncatesInputBeforeSending() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var sender = senderFactory.createSender("test_service")) { + sender.start(); + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + // truncated to 1 token = 3 characters + var model = createModel(getUrl(webServer), "org", "secret", "model", "user", 1); + var actionCreator = new OpenAiActionCreator(sender, createWithEmptySettings(threadPool)); + var overriddenTaskSettings = getRequestTaskSettingsMap(null, "overridden_user"); + var action = actionCreator.create(model, overriddenTaskSettings); + + PlainActionFuture listener = new PlainActionFuture<>(); + action.execute(List.of("super long input"), listener); + + var result = listener.actionGet(TIMEOUT); + + assertThat(result.asMap(), is(buildExpectation(List.of(List.of(0.0123F, -0.0123F))))); + assertThat(webServer.requests(), hasSize(1)); + assertNull(webServer.requests().get(0).getUri().getQuery()); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.CONTENT_TYPE), equalTo(XContentType.JSON.mediaType())); + assertThat(webServer.requests().get(0).getHeader(HttpHeaders.AUTHORIZATION), equalTo("Bearer secret")); + assertThat(webServer.requests().get(0).getHeader(ORGANIZATION_HEADER), equalTo("org")); + + var requestMap = entityAsMap(webServer.requests().get(0).getBody()); + assertThat(requestMap.size(), is(3)); + assertThat(requestMap.get("input"), is(List.of("sup"))); + assertThat(requestMap.get("model"), is("model")); + assertThat(requestMap.get("user"), is("overridden_user")); + } + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java index eabaf02b377dd..6bc8e2d61d579 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/action/openai/OpenAiEmbeddingsActionTests.java @@ -33,10 +33,10 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; diff --git 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java index 246e7d6d44c5a..8fbef260dfc50 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientManagerTests.java @@ -25,10 +25,10 @@ import java.nio.charset.StandardCharsets; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterService; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.is; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java index 3a7ec9d1b0f55..a26e4433f842b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/HttpClientTests.java @@ -42,8 +42,8 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterService; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterService; import static org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java index a46586fa6121b..f29120d9026a5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/IdleConnectionEvictorTests.java @@ -20,7 +20,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.Mockito.doAnswer; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java index b433306ec8261..d82ed47441442 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/Utils.java @@ -7,56 +7,22 @@ package org.elasticsearch.xpack.inference.external.http; -import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.settings.ClusterSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.http.MockWebServer; -import org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.xcontent.DeprecationHandler; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.XContentParser; import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; -import org.elasticsearch.xpack.inference.external.http.retry.RetrySettings; -import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; -import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; -import java.util.Collection; import java.util.Map; -import java.util.stream.Collectors; -import java.util.stream.Stream; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.inference.InferencePlugin.UTILITY_THREAD_POOL_NAME; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class Utils { - public static ClusterService mockClusterServiceEmpty() { - return mockClusterService(Settings.EMPTY); - } - - public static ClusterService mockClusterService(Settings settings) { - var clusterService = mock(ClusterService.class); - - var registeredSettings = Stream.of( - HttpSettings.getSettings(), - HttpClientManager.getSettings(), - HttpRequestSenderFactory.HttpRequestSender.getSettings(), - ThrottlerManager.getSettings(), - RetrySettings.getSettingsDefinitions() - ).flatMap(Collection::stream).collect(Collectors.toSet()); - - var cSettings = new ClusterSettings(settings, registeredSettings); - when(clusterService.getClusterSettings()).thenReturn(cSettings); - - return clusterService; - } public static String getUrl(MockWebServer webServer) { return format("http://%s:%s", webServer.getHostName(), webServer.getPort()); @@ -80,15 +46,4 @@ public static Map entityAsMap(InputStream body) throws IOExcepti return parser.map(); } } - - public static ScalingExecutorBuilder inferenceUtilityPool() { - return new ScalingExecutorBuilder( - UTILITY_THREAD_POOL_NAME, - 1, - 4, - TimeValue.timeValueMinutes(10), - false, - "xpack.inference.utility_thread_pool" - ); - } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java index 98c0afe655421..8b47dade32872 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/retry/RetryingHttpSenderTests.java @@ -12,6 +12,7 @@ import org.apache.http.StatusLine; import 
org.apache.http.client.methods.HttpRequestBase; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.util.concurrent.DeterministicTaskQueue; @@ -21,10 +22,13 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.http.HttpResult; import org.elasticsearch.xpack.inference.external.http.sender.Sender; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.logging.ThrottlerManager; import org.junit.Before; import org.mockito.stubbing.Answer; +import java.net.UnknownHostException; + import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.createDefaultRetrySettings; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; @@ -33,6 +37,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; +import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; public class RetryingHttpSenderTests extends ESTestCase { @@ -64,7 +69,7 @@ public void testSend_CallsSenderAgain_AfterValidateResponseThrowsAnException() { doThrow(new RetryException(true, "failed")).doNothing().when(handler).validateResponse(any(), any(), any(), any()); // Mockito.thenReturn() does not compile when returning a // bounded wild card list, thenAnswer must be used instead. - when(handler.parseResult(any())).thenAnswer(answer); + when(handler.parseResult(any(), any())).thenAnswer(answer); var retrier = new RetryingHttpSender( sender, @@ -76,10 +81,11 @@ public void testSend_CallsSenderAgain_AfterValidateResponseThrowsAnException() { ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1); assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); verify(sender, times(2)).send(any(), any()); + verifyNoMoreInteractions(sender); } public void testSend_CallsSenderAgain_WhenAFailureStatusCodeIsReturned() { @@ -113,10 +119,11 @@ public void testSend_CallsSenderAgain_WhenAFailureStatusCodeIsReturned() { ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1); assertThat(listener.actionGet(TIMEOUT), is(inferenceResults)); verify(sender, times(2)).send(any(), any()); + verifyNoMoreInteractions(sender); } public void testSend_CallsSenderAgain_WhenParsingFailsOnce() { @@ -135,7 +142,7 @@ public void testSend_CallsSenderAgain_WhenParsingFailsOnce() { Answer answer = (invocation) -> inferenceResults; var handler = mock(ResponseHandler.class); - when(handler.parseResult(any())).thenThrow(new RetryException(true, "failed")).thenAnswer(answer); + when(handler.parseResult(any(), any())).thenThrow(new RetryException(true, "failed")).thenAnswer(answer); var retrier = new RetryingHttpSender( sender, @@ -147,10 +154,11 @@ public void testSend_CallsSenderAgain_WhenParsingFailsOnce() { ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1); assertThat(listener.actionGet(TIMEOUT), 
is(inferenceResults));
         verify(sender, times(2)).send(any(), any());
+        verifyNoMoreInteractions(sender);
     }
 
     public void testSend_DoesNotCallSenderAgain_WhenParsingFailsWithNonRetryableException() {
@@ -169,7 +177,7 @@ public void testSend_DoesNotCallSenderAgain_WhenParsingFailsWithNonRetryableExce
         Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
 
         var handler = mock(ResponseHandler.class);
-        when(handler.parseResult(any())).thenThrow(new IllegalStateException("failed")).thenAnswer(answer);
+        when(handler.parseResult(any(), any())).thenThrow(new IllegalStateException("failed")).thenAnswer(answer);
 
         var retrier = new RetryingHttpSender(
             sender,
@@ -181,12 +189,13 @@ public void testSend_DoesNotCallSenderAgain_WhenParsingFailsWithNonRetryableExce
         );
 
         var listener = new PlainActionFuture<InferenceServiceResults>();
-        executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 0);
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 0);
 
         var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
         assertThat(thrownException.getMessage(), is("failed"));
 
         verify(sender, times(1)).send(any(), any());
+        verifyNoMoreInteractions(sender);
     }
 
     public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce() {
@@ -210,7 +219,7 @@ public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce()
         Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
 
         var handler = mock(ResponseHandler.class);
-        when(handler.parseResult(any())).thenAnswer(answer);
+        when(handler.parseResult(any(), any())).thenAnswer(answer);
 
         var retrier = new RetryingHttpSender(
             sender,
@@ -222,10 +231,51 @@ public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce()
         );
 
         var listener = new PlainActionFuture<InferenceServiceResults>();
-        executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1);
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1);
 
         assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
         verify(sender, times(2)).send(any(), any());
+        verifyNoMoreInteractions(sender);
+    }
+
+    public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnce_WithContentTooLargeException() {
+        var sender = mock(Sender.class);
+
+        doAnswer(invocation -> {
+            @SuppressWarnings("unchecked")
+            ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[1];
+            listener.onFailure(new ContentTooLargeException(new IllegalStateException("failed")));
+
+            return Void.TYPE;
+        }).doAnswer(invocation -> {
+            @SuppressWarnings("unchecked")
+            ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[1];
+            listener.onResponse(new HttpResult(mock(HttpResponse.class), new byte[] { 'a' }));
+
+            return Void.TYPE;
+        }).when(sender).send(any(), any());
+
+        var inferenceResults = mock(InferenceServiceResults.class);
+        Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
+
+        var handler = mock(ResponseHandler.class);
+        when(handler.parseResult(any(), any())).thenAnswer(answer);
+
+        var retrier = new RetryingHttpSender(
+            sender,
+            mock(ThrottlerManager.class),
+            mock(Logger.class),
+            createDefaultRetrySettings(),
+            taskQueue.getThreadPool(),
+            EsExecutors.DIRECT_EXECUTOR_SERVICE
+        );
+
+        var listener = new PlainActionFuture<InferenceServiceResults>();
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1);
+
+        assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
+        verify(sender, times(2)).send(any(), any());
+        verifyNoMoreInteractions(sender);
     }
 
     public void 
testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnceWithConnectionClosedException() {
@@ -249,7 +299,7 @@ public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnceWi
         Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
 
         var handler = mock(ResponseHandler.class);
-        when(handler.parseResult(any())).thenAnswer(answer);
+        when(handler.parseResult(any(), any())).thenAnswer(answer);
 
         var retrier = new RetryingHttpSender(
             sender,
@@ -261,10 +311,46 @@ public void testSend_CallsSenderAgain_WhenHttpResultListenerCallsOnFailureOnceWi
         );
 
         var listener = new PlainActionFuture<InferenceServiceResults>();
-        executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1);
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1);
 
         assertThat(listener.actionGet(TIMEOUT), is(inferenceResults));
         verify(sender, times(2)).send(any(), any());
+        verifyNoMoreInteractions(sender);
+    }
+
+    public void testSend_ReturnsFailure_WhenHttpResultListenerCallsOnFailureOnceWithUnknownHostException() {
+        var sender = mock(Sender.class);
+
+        doAnswer(invocation -> {
+            @SuppressWarnings("unchecked")
+            ActionListener<HttpResult> listener = (ActionListener<HttpResult>) invocation.getArguments()[1];
+            listener.onFailure(new UnknownHostException("failed"));
+
+            return Void.TYPE;
+        }).when(sender).send(any(), any());
+
+        var inferenceResults = mock(InferenceServiceResults.class);
+        Answer<InferenceServiceResults> answer = (invocation) -> inferenceResults;
+
+        var handler = mock(ResponseHandler.class);
+        when(handler.parseResult(any(), any())).thenAnswer(answer);
+
+        var retrier = new RetryingHttpSender(
+            sender,
+            mock(ThrottlerManager.class),
+            mock(Logger.class),
+            createDefaultRetrySettings(),
+            taskQueue.getThreadPool(),
+            EsExecutors.DIRECT_EXECUTOR_SERVICE
+        );
+
+        var listener = new PlainActionFuture<InferenceServiceResults>();
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 0);
+
+        var thrownException = expectThrows(ElasticsearchStatusException.class, () -> listener.actionGet(TIMEOUT));
+        assertThat(thrownException.getMessage(), is("Invalid host [null], please check that the URL is correct."));
+        verify(sender, times(1)).send(any(), any());
+        verifyNoMoreInteractions(sender);
     }
 
     public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterOneRetry() {
@@ -288,7 +374,7 @@ public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterO
         doThrow(new RetryException(true, "failed")).doThrow(new IllegalStateException("failed again"))
             .when(handler)
             .validateResponse(any(), any(), any(), any());
-        when(handler.parseResult(any())).thenAnswer(answer);
+        when(handler.parseResult(any(), any())).thenAnswer(answer);
 
         var retrier = new RetryingHttpSender(
             sender,
@@ -300,7 +386,7 @@ public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterO
         );
 
         var listener = new PlainActionFuture<InferenceServiceResults>();
-        executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1);
+        executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1);
 
         var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT));
         assertThat(thrownException.getMessage(), is("failed again"));
@@ -308,6 +394,7 @@ public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnException_AfterO
         assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed"));
 
         verify(sender, times(2)).send(any(), any());
+        verifyNoMoreInteractions(sender);
     }
 
     public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnElasticsearchException_AfterOneRetry() {
@@ -331,7 +418,7 @@ public void 
testSend_ReturnsFailure_WhenValidateResponseThrowsAnElasticsearchExc doThrow(new RetryException(true, "failed")).doThrow(new RetryException(false, "failed again")) .when(handler) .validateResponse(any(), any(), any(), any()); - when(handler.parseResult(any())).thenAnswer(answer); + when(handler.parseResult(any(), any())).thenAnswer(answer); var retrier = new RetryingHttpSender( sender, @@ -343,13 +430,14 @@ public void testSend_ReturnsFailure_WhenValidateResponseThrowsAnElasticsearchExc ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1); var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is("failed again")); assertThat(thrownException.getSuppressed().length, is(1)); assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed")); verify(sender, times(2)).send(any(), any()); + verifyNoMoreInteractions(sender); } public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_AfterOneRetry() { @@ -384,13 +472,14 @@ public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_AfterO ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 1); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 1); var thrownException = expectThrows(RetryException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is("failed again")); assertThat(thrownException.getSuppressed().length, is(1)); assertThat(thrownException.getSuppressed()[0].getMessage(), is("failed")); verify(sender, times(2)).send(any(), any()); + verifyNoMoreInteractions(sender); } public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_WithNonRetryableException() { @@ -419,12 +508,13 @@ public void testSend_ReturnsFailure_WhenHttpResultsListenerCallsOnFailure_WithNo ); var listener = new PlainActionFuture(); - executeTasks(() -> retrier.send(mock(HttpRequestBase.class), handler, listener), 0); + executeTasks(() -> retrier.send(mockRequest(), handler, listener), 0); var thrownException = expectThrows(IllegalStateException.class, () -> listener.actionGet(TIMEOUT)); assertThat(thrownException.getMessage(), is("failed")); assertThat(thrownException.getSuppressed().length, is(0)); verify(sender, times(1)).send(any(), any()); + verifyNoMoreInteractions(sender); } private static HttpResponse mockHttpResponse() { @@ -448,4 +538,12 @@ private void executeTasks(Runnable runnable, int retries) { taskQueue.runAllRunnableTasks(); } } + + private static Request mockRequest() { + var request = mock(Request.class); + when(request.truncate()).thenReturn(request); + when(request.createRequest()).thenReturn(mock(HttpRequestBase.class)); + + return request; + } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java index 992f0d68bd920..b9b847b5187cb 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestExecutorServiceTests.java @@ -28,8 +28,8 @@ import 
java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.mockito.ArgumentMatchers.any; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java index af4ac7cd59977..c3bfbe2c294fc 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/HttpRequestSenderFactoryTests.java @@ -34,9 +34,9 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java index e6c47c891f0d7..ce470fa002824 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/http/sender/RequestTaskTests.java @@ -37,10 +37,10 @@ import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createConnectionManager; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.createHttpPost; import static org.elasticsearch.xpack.inference.external.http.HttpClientTests.emptyHttpSettings; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; import static org.hamcrest.Matchers.instanceOf; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java index 9bebddc9ebb87..f4642d0bf48c2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java +++ 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/huggingface/HuggingFaceResponseHandlerTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; import static org.hamcrest.Matchers.containsString; @@ -33,7 +34,7 @@ public void testCheckForFailureStatusCode() { var httpResult = new HttpResult(httpResponse, new byte[] {}); - var handler = new HuggingFaceResponseHandler("", result -> null); + var handler = new HuggingFaceResponseHandler("", (request, result) -> null); // 200 ok when(statusLine.getStatusCode()).thenReturn(200); @@ -65,6 +66,12 @@ public void testCheckForFailureStatusCode() { containsString("Received a rate limit status code for request [null] status [429]") ); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 413 + when(statusLine.getStatusCode()).thenReturn(413); + retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.REQUEST_ENTITY_TOO_LARGE)); // 401 when(statusLine.getStatusCode()).thenReturn(401); retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClientTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClientTests.java index b1c7317b5e22c..bb9612f01d8ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClientTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiClientTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.test.http.MockWebServer; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.http.HttpClientManager; import org.elasticsearch.xpack.inference.external.http.sender.HttpRequestSenderFactory; import org.elasticsearch.xpack.inference.external.http.sender.Sender; @@ -31,10 +32,10 @@ import java.util.concurrent.TimeUnit; import static org.elasticsearch.core.Strings.format; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.retry.RetrySettingsTests.buildSettingsWithRetryFields; import static 
org.elasticsearch.xpack.inference.external.request.openai.OpenAiEmbeddingsRequestTests.createRequest; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; @@ -253,7 +254,12 @@ public void testSend_FailsFromInvalidResponseFormat() throws IOException, URISyn threadPool, mockThrottlerManager(), // timeout as zero for no retries - buildSettingsWithRetryFields(TimeValue.timeValueMillis(1), TimeValue.timeValueMinutes(1), TimeValue.timeValueSeconds(0)) + buildSettingsWithRetryFields( + TimeValue.timeValueMillis(1), + TimeValue.timeValueMinutes(1), + TimeValue.timeValueSeconds(0) + ), + TruncatorTests.createTruncator() ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java index e3698701ac1f0..e3d1c19b0452b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/openai/OpenAiResponseHandlerTests.java @@ -10,16 +10,19 @@ import org.apache.http.Header; import org.apache.http.HeaderElement; import org.apache.http.HttpResponse; -import org.apache.http.RequestLine; import org.apache.http.StatusLine; import org.apache.http.client.methods.HttpRequestBase; import org.apache.http.message.BasicHeader; import org.elasticsearch.ElasticsearchStatusException; +import org.elasticsearch.common.Strings; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.http.retry.ContentTooLargeException; import org.elasticsearch.xpack.inference.external.http.retry.RetryException; +import java.nio.charset.StandardCharsets; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.core.Is.is; import static org.mockito.ArgumentMatchers.anyString; @@ -39,7 +42,7 @@ public void testCheckForFailureStatusCode() { var httpRequest = mock(HttpRequestBase.class); var httpResult = new HttpResult(httpResponse, new byte[] {}); - var handler = new OpenAiResponseHandler("", result -> null); + var handler = new OpenAiResponseHandler("", (request, result) -> null); // 200 ok when(statusLine.getStatusCode()).thenReturn(200); @@ -59,6 +62,41 @@ public void testCheckForFailureStatusCode() { assertTrue(retryException.shouldRetry()); assertThat(retryException.getCause().getMessage(), containsString("Received a rate limit status code. 
Token limit")); assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.TOO_MANY_REQUESTS)); + // 413 + when(statusLine.getStatusCode()).thenReturn(413); + retryException = expectThrows(ContentTooLargeException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertTrue(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.REQUEST_ENTITY_TOO_LARGE)); + // 400 content too large + retryException = expectThrows( + ContentTooLargeException.class, + () -> handler.checkForFailureStatusCode(httpRequest, createContentTooLargeResult(400)) + ); + assertTrue(retryException.shouldRetry()); + assertThat(retryException.getCause().getMessage(), containsString("Received a content too large status code")); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 400 generic bad request should not be marked as a content too large + when(statusLine.getStatusCode()).thenReturn(400); + retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an unsuccessful status code for request [null] status [400]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); + // 400 is not flagged as a content too large when the error message is different + when(statusLine.getStatusCode()).thenReturn(400); + retryException = expectThrows( + RetryException.class, + () -> handler.checkForFailureStatusCode(httpRequest, createResult(400, "blah")) + ); + assertFalse(retryException.shouldRetry()); + assertThat( + retryException.getCause().getMessage(), + containsString("Received an unsuccessful status code for request [null] status [400]") + ); + assertThat(((ElasticsearchStatusException) retryException.getCause()).status(), is(RestStatus.BAD_REQUEST)); // 401 when(statusLine.getStatusCode()).thenReturn(401); retryException = expectThrows(RetryException.class, () -> handler.checkForFailureStatusCode(httpRequest, httpResult)); @@ -89,10 +127,8 @@ public void testBuildRateLimitErrorMessage() { int statusCode = 429; var statusLine = mock(StatusLine.class); when(statusLine.getStatusCode()).thenReturn(statusCode); - var requestLine = mock(RequestLine.class); var response = mock(HttpResponse.class); when(response.getStatusLine()).thenReturn(statusLine); - var request = mock(HttpRequestBase.class); var httpResult = new HttpResult(response, new byte[] {}); { @@ -109,7 +145,7 @@ public void testBuildRateLimitErrorMessage() { new BasicHeader(OpenAiResponseHandler.REMAINING_TOKENS, "99800") ); - var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(httpResult); assertThat( error, containsString("Token limit [10000], remaining tokens [99800]. 
Request limit [3000], remaining requests [2999]") @@ -119,7 +155,7 @@ public void testBuildRateLimitErrorMessage() { { when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); - var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(httpResult); assertThat( error, containsString("Token limit [unknown], remaining tokens [unknown]. Request limit [3000], remaining requests [2999]") @@ -133,7 +169,7 @@ public void testBuildRateLimitErrorMessage() { ); when(response.getFirstHeader(OpenAiResponseHandler.TOKENS_LIMIT)).thenReturn(null); when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); - var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(httpResult); assertThat( error, containsString("Token limit [unknown], remaining tokens [unknown]. Request limit [unknown], remaining requests [2999]") @@ -149,11 +185,39 @@ public void testBuildRateLimitErrorMessage() { new BasicHeader(OpenAiResponseHandler.TOKENS_LIMIT, "10000") ); when(response.getFirstHeader(OpenAiResponseHandler.REMAINING_TOKENS)).thenReturn(null); - var error = OpenAiResponseHandler.buildRateLimitErrorMessage(request, httpResult); + var error = OpenAiResponseHandler.buildRateLimitErrorMessage(httpResult); assertThat( error, containsString("Token limit [10000], remaining tokens [unknown]. Request limit [unknown], remaining requests [2999]") ); } } + + private static HttpResult createContentTooLargeResult(int statusCode) { + return createResult( + statusCode, + "This model's maximum context length is 8192 tokens, however you requested 13531 tokens (13531 in your prompt;" + + "0 for the completion). Please reduce your prompt; or completion length." 
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntityTests.java similarity index 93% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntityTests.java index 738ab3d155bc4..06cfa7de21b3b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestEntityTests.java @@ -18,7 +18,7 @@ import static org.hamcrest.CoreMatchers.is; -public class HuggingFaceElserRequestEntityTests extends ESTestCase { +public class HuggingFaceInferenceRequestEntityTests extends ESTestCase { public void testXContent() throws IOException { var entity = new HuggingFaceInferenceRequestEntity(List.of("abc")); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java similarity index 59% rename from x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java rename to x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java index 1a5eb7fb8845c..17124b6ea6d7a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceElserRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/huggingface/HuggingFaceInferenceRequestTests.java @@ -12,6 +12,8 @@ import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.huggingface.HuggingFaceAccount; import java.io.IOException; @@ -24,7 +26,7 @@ import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; -public class HuggingFaceElserRequestTests extends ESTestCase { +public class HuggingFaceInferenceRequestTests extends ESTestCase { @SuppressWarnings("unchecked") public void testCreateRequest() throws URISyntaxException, IOException { var huggingFaceRequest = 
createRequest("www.google.com", "secret", "abc"); @@ -44,10 +46,35 @@ public void testCreateRequest() throws URISyntaxException, IOException { assertThat(inputList, contains("abc")); } + public void testTruncate_ReducesInputTextSizeByHalf() throws URISyntaxException, IOException { + var huggingFaceRequest = createRequest("www.google.com", "secret", "abcd"); + var truncatedRequest = huggingFaceRequest.truncate(); + assertThat(truncatedRequest.getURI().toString(), is(new URI("www.google.com").toString())); + + var httpRequest = truncatedRequest.createRequest(); + assertThat(httpRequest, instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest; + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap.get("inputs"), instanceOf(List.class)); + assertThat(requestMap.get("inputs"), is(List.of("ab"))); + } + + public void testIsTruncated_ReturnsTrue() throws URISyntaxException, IOException { + var huggingFaceRequest = createRequest("www.google.com", "secret", "abcd"); + assertFalse(huggingFaceRequest.getTruncationInfo()[0]); + + var truncatedRequest = huggingFaceRequest.truncate(); + assertTrue(truncatedRequest.getTruncationInfo()[0]); + } + public static HuggingFaceInferenceRequest createRequest(String url, String apiKey, String input) throws URISyntaxException { var account = new HuggingFaceAccount(new URI(url), new SecureString(apiKey.toCharArray())); - var entity = new HuggingFaceInferenceRequestEntity(List.of(input)); - return new HuggingFaceInferenceRequest(account, entity); + return new HuggingFaceInferenceRequest( + TruncatorTests.createTruncator(), + account, + new Truncator.TruncationResult(List.of(input), new boolean[] { false }) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java index 146601da86dbd..cbbd83c896d28 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/request/openai/OpenAiEmbeddingsRequestTests.java @@ -13,7 +13,10 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.XContentType; +import org.elasticsearch.xpack.inference.common.Truncator; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import org.elasticsearch.xpack.inference.external.openai.OpenAiAccount; +import org.elasticsearch.xpack.inference.services.openai.embeddings.OpenAiEmbeddingsTaskSettings; import java.io.IOException; import java.net.URI; @@ -84,6 +87,29 @@ public void testCreateRequest_WithDefaultUrlAndWithoutUserOrganization() throws assertThat(requestMap.get("model"), is("model")); } + public void testTruncate_ReducesInputTextSizeByHalf() throws URISyntaxException, IOException { + var request = createRequest(null, null, "secret", "abcd", "model", null); + var truncatedRequest = request.truncate(); + assertThat(request.getURI().toString(), is(buildDefaultUri().toString())); + + var httpRequest = truncatedRequest.createRequest(); + assertThat(httpRequest, instanceOf(HttpPost.class)); + + var httpPost = (HttpPost) httpRequest; + var requestMap = entityAsMap(httpPost.getEntity().getContent()); + assertThat(requestMap, aMapWithSize(2)); + assertThat(requestMap.get("input"), 
is(List.of("ab"))); + assertThat(requestMap.get("model"), is("model")); + } + + public void testIsTruncated_ReturnsTrue() throws URISyntaxException, IOException { + var request = createRequest(null, null, "secret", "abcd", "model", null); + assertFalse(request.getTruncationInfo()[0]); + + var truncatedRequest = request.truncate(); + assertTrue(truncatedRequest.getTruncationInfo()[0]); + } + public static OpenAiEmbeddingsRequest createRequest( @Nullable String url, @Nullable String org, @@ -95,8 +121,12 @@ public static OpenAiEmbeddingsRequest createRequest( var uri = url == null ? null : new URI(url); var account = new OpenAiAccount(uri, org, new SecureString(apiKey.toCharArray())); - var entity = new OpenAiEmbeddingsRequestEntity(List.of(input), model, user); - return new OpenAiEmbeddingsRequest(account, entity); + return new OpenAiEmbeddingsRequest( + TruncatorTests.createTruncator(), + account, + new Truncator.TruncationResult(List.of(input), new boolean[] { false }), + new OpenAiEmbeddingsTaskSettings(model, user) + ); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java index 606e0cc83f451..bdb8e38fa8228 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceElserResponseEntityTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xcontent.XContentParseException; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; import org.elasticsearch.xpack.inference.results.SparseEmbeddingResultsTests; import java.io.IOException; @@ -25,6 +26,7 @@ import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.when; public class HuggingFaceElserResponseEntityTests extends ESTestCase { public void testFromResponse_CreatesTextExpansionResults() throws IOException { @@ -37,6 +39,7 @@ public void testFromResponse_CreatesTextExpansionResults() throws IOException { ]"""; SparseEmbeddingResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -50,7 +53,34 @@ public void testFromResponse_CreatesTextExpansionResults() throws IOException { ); } - public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IOException { + public void testFromResponse_CreatesTextExpansionResults_ThatAreTruncated() throws IOException { + var request = mock(Request.class); + when(request.getTruncationInfo()).thenReturn(new boolean[] { true }); + + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + } + ]"""; + + SparseEmbeddingResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + request, + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.asMap(), + is( + buildExpectation( + List.of(new 
SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f, "the", 0.67472112f), true)) + ) + ) + ); + } + + public void testFromResponse_CreatesTextExpansionResultsForMultipleItems_TruncationIsNull() throws IOException { String responseJson = """ [ { @@ -64,6 +94,75 @@ public void testFromResponse_CreatesTextExpansionResultsForFirstItem() throws IO ]"""; SparseEmbeddingResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); assertThat( parsedResults.asMap(), is( buildExpectation( List.of( new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f, "the", 0.67472112f), false), new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of("hi", 0.13315596f, "super", 0.67472112f), false) ) ) ) ); } + + public void testFromResponse_CreatesTextExpansionResults_WithTruncation() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + }, + { + "hi": 0.133155956864357, + "super": 0.6747211217880249 + } + ]"""; + + var request = mock(Request.class); + when(request.getTruncationInfo()).thenReturn(new boolean[] { true, false }); + + SparseEmbeddingResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + request, + new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) + ); + + assertThat( + parsedResults.asMap(), + is( + buildExpectation( + List.of( + new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of(".", 0.13315596f, "the", 0.67472112f), true), + new SparseEmbeddingResultsTests.EmbeddingExpectation(Map.of("hi", 0.13315596f, "super", 0.67472112f), false) + ) + ) + ) + ); + } + + public void testFromResponse_CreatesTextExpansionResults_WithTruncationLessArrayLessThanExpected() throws IOException { + String responseJson = """ + [ + { + ".": 0.133155956864357, + "the": 0.6747211217880249 + }, + { + "hi": 0.133155956864357, + "super": 0.6747211217880249 + } + ]"""; + + var request = mock(Request.class); + when(request.getTruncationInfo()).thenReturn(new boolean[] {}); + + SparseEmbeddingResults parsedResults = HuggingFaceElserResponseEntity.fromResponse( + request, new HttpResult(mock(HttpResponse.class), 
responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -172,6 +275,7 @@ public void testFails_ValueObject() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceElserResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -193,6 +297,7 @@ public void testFails_ResponseIsInvalidJson_MissingSquareBracket() { var thrownException = expectThrows( XContentEOFException.class, () -> HuggingFaceElserResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -212,6 +317,7 @@ public void testFails_ResponseIsInvalidJson_MissingField() { var thrownException = expectThrows( XContentParseException.class, () -> HuggingFaceElserResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java index e3f14ad085761..2b6e11fdfafa7 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/huggingface/HuggingFaceEmbeddingsResponseEntityTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -32,6 +33,7 @@ public void testFromResponse_CreatesResultsForASingleItem_ArrayFormat() throws I """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -51,6 +53,7 @@ public void testFromResponse_CreatesResultsForASingleItem_ObjectFormat() throws """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -72,6 +75,7 @@ public void testFromResponse_CreatesResultsForMultipleItems_ArrayFormat() throws """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -103,6 +107,7 @@ public void testFromResponse_CreatesResultsForMultipleItems_ObjectFormat() throw """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -127,6 +132,7 @@ public void testFromResponse_FailsWhenArrayOfObjects() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -152,6 +158,7 @@ public void 
testFromResponse_FailsWhenEmbeddingsFieldIsNotPresent() { var thrownException = expectThrows( IllegalStateException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -174,6 +181,7 @@ public void testFromResponse_FailsWhenEmbeddingsFieldNotAnArray() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -196,6 +204,7 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAString_ArrayFormat() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -220,6 +229,7 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAString_ObjectFormat() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -240,6 +250,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ArrayFormat() throw """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -258,6 +269,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt_ObjectFormat() thro """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -274,6 +286,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ArrayFormat() thro """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -292,6 +305,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong_ObjectFormat() thr """; TextEmbeddingResults parsedResults = HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -312,6 +326,7 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject_ObjectFormat() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -330,6 +345,7 @@ public void testFromResponse_FailsWithUnknownToken() { var thrownException = expectThrows( ParsingException.class, () -> HuggingFaceEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java index 2301be28f62c4..010e990a3ce80 100644 --- 
a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/external/response/openai/OpenAiEmbeddingsResponseEntityTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingResults; import org.elasticsearch.xpack.inference.external.http.HttpResult; +import org.elasticsearch.xpack.inference.external.request.Request; import java.io.IOException; import java.nio.charset.StandardCharsets; @@ -44,6 +45,7 @@ public void testFromResponse_CreatesResultsForASingleItem() throws IOException { """; TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -81,6 +83,7 @@ public void testFromResponse_CreatesResultsForMultipleItems() throws IOException """; TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -120,6 +123,7 @@ public void testFromResponse_FailsWhenDataFieldIsNotPresent() { var thrownException = expectThrows( IllegalStateException.class, () -> OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -152,6 +156,7 @@ public void testFromResponse_FailsWhenDataFieldNotAnArray() { var thrownException = expectThrows( ParsingException.class, () -> OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -187,6 +192,7 @@ public void testFromResponse_FailsWhenEmbeddingsDoesNotExist() { var thrownException = expectThrows( IllegalStateException.class, () -> OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -218,6 +224,7 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAString() { var thrownException = expectThrows( ParsingException.class, () -> OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); @@ -250,6 +257,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsInt() throws IOExceptio """; TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -278,6 +286,7 @@ public void testFromResponse_SucceedsWhenEmbeddingValueIsLong() throws IOExcepti """; TextEmbeddingResults parsedResults = OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ); @@ -308,6 +317,7 @@ public void testFromResponse_FailsWhenEmbeddingValueIsAnObject() { var thrownException = expectThrows( ParsingException.class, () -> OpenAiEmbeddingsResponseEntity.fromResponse( + mock(Request.class), new HttpResult(mock(HttpResponse.class), responseJson.getBytes(StandardCharsets.UTF_8)) ) ); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java index a9e85d0ffcb1d..e7160f0390669 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerManagerTests.java @@ -16,8 +16,8 @@ import org.junit.After; import org.junit.Before; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.mock; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java index d23f057a7a23e..77f099557629f 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/logging/ThrottlerTests.java @@ -22,7 +22,7 @@ import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java index fb61a86c7b9c4..bae2e7e9b68c9 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/SenderServiceTests.java @@ -28,7 +28,7 @@ import java.util.Set; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceComponentsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceComponentsTests.java index 8ce615ecbb060..77713fbfc30a5 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceComponentsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/ServiceComponentsTests.java @@ -10,11 +10,12 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.inference.common.TruncatorTests; import static 
org.elasticsearch.xpack.inference.logging.ThrottlerManagerTests.mockThrottlerManager; public class ServiceComponentsTests extends ESTestCase { public static ServiceComponents createWithEmptySettings(ThreadPool threadPool) { - return new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY); + return new ServiceComponents(threadPool, mockThrottlerManager(), Settings.EMPTY, TruncatorTests.createTruncator()); } } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java index d5eb6e76b622b..b82812d6c393a 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceBaseServiceTests.java @@ -28,7 +28,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.Utils.getInvalidModel; import static org.hamcrest.CoreMatchers.is; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java index fa31d026b16f5..a76cce41b4fe4 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/HuggingFaceServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -42,10 +43,10 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; import static org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettingsTests.getServiceSettingsMap; @@ -552,6 +553,31 @@ public void testInfer_SendsElserRequest() throws IOException { } } + public void testCheckModelConfig_IncludesMaxTokens() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, 
mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new HuggingFaceService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "embeddings": [ + [ + -0.0123 + ] + ] + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1); + PlainActionFuture<Model> listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result, is(HuggingFaceEmbeddingsModelTests.createModel(getUrl(webServer), "secret", 1, 1))); + } + } + private Map<String, Object> getRequestConfigMap(Map<String, Object> serviceSettings, Map<String, Object> secretSettings) { var builtServiceSettings = new HashMap<>(); builtServiceSettings.putAll(serviceSettings); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java index 6cf70189cea74..cb37ccfead45d 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/huggingface/embeddings/HuggingFaceEmbeddingsModelTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.inference.services.huggingface.HuggingFaceServiceSettings; import org.elasticsearch.xpack.inference.services.settings.DefaultSecretSettings; +import static org.elasticsearch.xpack.inference.services.ServiceUtils.createUri; import static org.hamcrest.Matchers.is; public class HuggingFaceEmbeddingsModelTests extends ESTestCase { @@ -31,4 +32,24 @@ public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey) new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey, int tokenLimit) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(createUri(url), null, null, tokenLimit), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + + public static HuggingFaceEmbeddingsModel createModel(String url, String apiKey, int tokenLimit, int dimensions) { + return new HuggingFaceEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new HuggingFaceServiceSettings(createUri(url), null, dimensions, tokenLimit), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } }
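Note: testCheckModelConfig_IncludesMaxTokens above asserts that the model returned from checkModelConfig equals one rebuilt with a dimensions value matching the length of the embedding the mock server returned (1 for Hugging Face here, 2 in the OpenAI test below). A minimal sketch of that probe-and-record idea, with hypothetical names; the real services wire this through listeners and service settings rather than a plain method:

import java.util.List;

final class DimensionProbeSketch {
    // Hypothetical stand-in for the settings the test cares about.
    record EmbeddingModel(int tokenLimit, Integer dimensions) {}

    // The service sends a tiny probe request and persists the observed embedding size.
    static EmbeddingModel checkModelConfig(EmbeddingModel configured, List<Double> probeEmbedding) {
        return new EmbeddingModel(configured.tokenLimit(), probeEmbedding.size());
    }

    public static void main(String[] args) {
        var checked = checkModelConfig(new EmbeddingModel(1, null), List.of(-0.0123));
        System.out.println(checked); // EmbeddingModel[tokenLimit=1, dimensions=1]
    }
}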
diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java index a82600c537663..394286ee5287b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/OpenAiServiceTests.java @@ -15,6 +15,7 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.inference.InferenceServiceResults; +import org.elasticsearch.inference.Model; import org.elasticsearch.inference.ModelConfigurations; import org.elasticsearch.inference.ModelSecrets; import org.elasticsearch.inference.TaskType; @@ -40,10 +41,10 @@ import java.util.Set; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.xpack.inference.Utils.inferenceUtilityPool; +import static org.elasticsearch.xpack.inference.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.http.Utils.entityAsMap; import static org.elasticsearch.xpack.inference.external.http.Utils.getUrl; -import static org.elasticsearch.xpack.inference.external.http.Utils.inferenceUtilityPool; -import static org.elasticsearch.xpack.inference.external.http.Utils.mockClusterServiceEmpty; import static org.elasticsearch.xpack.inference.external.request.openai.OpenAiUtils.ORGANIZATION_HEADER; import static org.elasticsearch.xpack.inference.results.TextEmbeddingResultsTests.buildExpectation; import static org.elasticsearch.xpack.inference.services.ServiceComponentsTests.createWithEmptySettings; @@ -731,6 +732,42 @@ public void testInfer_SendsRequest() throws IOException { } } + public void testCheckModelConfig_IncludesMaxTokens() throws IOException { + var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); + + try (var service = new OpenAiService(new SetOnce<>(senderFactory), new SetOnce<>(createWithEmptySettings(threadPool)))) { + + String responseJson = """ + { + "object": "list", + "data": [ + { + "object": "embedding", + "index": 0, + "embedding": [ + 0.0123, + -0.0123 + ] + } + ], + "model": "text-embedding-ada-002-v2", + "usage": { + "prompt_tokens": 8, + "total_tokens": 8 + } + } + """; + webServer.enqueue(new MockResponse().setResponseCode(200).setBody(responseJson)); + + var model = OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", 1); + PlainActionFuture<Model> listener = new PlainActionFuture<>(); + service.checkModelConfig(model, listener); + + var result = listener.actionGet(TIMEOUT); + assertThat(result, is(OpenAiEmbeddingsModelTests.createModel(getUrl(webServer), "org", "secret", "model", "user", 1, 2))); + } + } + public void testInfer_UnauthorisedResponse() throws IOException { var senderFactory = new HttpRequestSenderFactory(threadPool, clientManager, mockClusterServiceEmpty(), Settings.EMPTY); diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java index 302aacdc30606..10e856ec8a27e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/openai/embeddings/OpenAiEmbeddingsModelTests.java @@ -64,4 +64,41 @@ public static OpenAiEmbeddingsModel createModel( new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) ); } + + public static OpenAiEmbeddingsModel createModel( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String user, + @Nullable Integer tokenLimit + ) { + return new OpenAiEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new OpenAiServiceSettings(url, org, SimilarityMeasure.DOT_PRODUCT, 1536, tokenLimit), + new OpenAiEmbeddingsTaskSettings(modelName, user), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } + + public 
static OpenAiEmbeddingsModel createModel( + String url, + @Nullable String org, + String apiKey, + String modelName, + @Nullable String user, + @Nullable Integer tokenLimit, + @Nullable Integer dimensions + ) { + return new OpenAiEmbeddingsModel( + "id", + TaskType.TEXT_EMBEDDING, + "service", + new OpenAiServiceSettings(url, org, SimilarityMeasure.DOT_PRODUCT, dimensions, tokenLimit), + new OpenAiEmbeddingsTaskSettings(modelName, user), + new DefaultSecretSettings(new SecureString(apiKey.toCharArray())) + ); + } } diff --git a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java index d93c24356422f..f75dd2926059a 100644 --- a/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java +++ b/x-pack/plugin/logstash/src/test/java/org/elasticsearch/xpack/logstash/action/TransportGetPipelineActionTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; import org.elasticsearch.test.MockUtils; @@ -116,9 +115,14 @@ public void onFailure(Exception e) { * Test that the explicit and wildcard IDs are requested. */ public void testGetPipelinesByExplicitAndWildcardIds() { - InternalSearchResponse internalSearchResponse = new InternalSearchResponse(prepareSearchHits(), null, null, null, false, null, 1); SearchResponse searchResponse = new SearchResponse( - internalSearchResponse, + prepareSearchHits(), + null, + null, + false, + null, + null, + 1, null, 1, 1, diff --git a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java index d04bb88325cc7..ad5e224efd5db 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java +++ b/x-pack/plugin/mapper-counted-keyword/src/main/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapper.java @@ -78,16 +78,24 @@ public class CountedKeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "counted_keyword"; public static final String COUNT_FIELD_NAME_SUFFIX = "_count"; - public static final FieldType FIELD_TYPE; + private static final FieldType FIELD_TYPE_INDEXED; + private static final FieldType FIELD_TYPE_NOT_INDEXED; static { - FieldType ft = new FieldType(); - ft.setDocValuesType(DocValuesType.SORTED_SET); - ft.setTokenized(false); - ft.setOmitNorms(true); - ft.setIndexOptions(IndexOptions.DOCS); - ft.freeze(); - FIELD_TYPE = freezeAndDeduplicateFieldType(ft); + FieldType indexed = new FieldType(); + indexed.setDocValuesType(DocValuesType.SORTED_SET); + indexed.setTokenized(false); + indexed.setOmitNorms(true); + indexed.setIndexOptions(IndexOptions.DOCS); + FIELD_TYPE_INDEXED = freezeAndDeduplicateFieldType(indexed); + + FieldType notIndexed = new FieldType(); + notIndexed.setDocValuesType(DocValuesType.SORTED_SET); + notIndexed.setTokenized(false); + notIndexed.setOmitNorms(true); + notIndexed.setIndexOptions(IndexOptions.NONE); + FIELD_TYPE_NOT_INDEXED = 
freezeAndDeduplicateFieldType(notIndexed); + } private static class CountedKeywordFieldType extends StringFieldType { @@ -261,7 +269,12 @@ public TermsEnum termsEnum() throws IOException { } } + private static CountedKeywordFieldMapper toType(FieldMapper in) { + return (CountedKeywordFieldMapper) in; + } + public static class Builder extends FieldMapper.Builder { + private final Parameter<Boolean> indexed = Parameter.indexParam(m -> toType(m).mappedFieldType.isIndexed(), true); private final Parameter<Map<String, String>> meta = Parameter.metaParam(); protected Builder(String name) { @@ -270,22 +283,24 @@ protected Builder(String name) { @Override protected Parameter<?>[] getParameters() { - return new Parameter<?>[] { meta }; + return new Parameter<?>[] { meta, indexed }; } @Override public FieldMapper build(MapperBuilderContext context) { BinaryFieldMapper countFieldMapper = new BinaryFieldMapper.Builder(name + COUNT_FIELD_NAME_SUFFIX, true).build(context); + boolean isIndexed = indexed.getValue(); + FieldType ft = isIndexed ? FIELD_TYPE_INDEXED : FIELD_TYPE_NOT_INDEXED; return new CountedKeywordFieldMapper( name, - FIELD_TYPE, + ft, new CountedKeywordFieldType( context.buildFullName(name), - true, + isIndexed, false, true, - new TextSearchInfo(FIELD_TYPE, null, KEYWORD_ANALYZER, KEYWORD_ANALYZER), + new TextSearchInfo(ft, null, KEYWORD_ANALYZER, KEYWORD_ANALYZER), meta.getValue(), countFieldMapper.fieldType() ), diff --git a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java index 1468ed456b132..2ffd4468c814a 100644 --- a/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java +++ b/x-pack/plugin/mapper-counted-keyword/src/test/java/org/elasticsearch/xpack/countedkeyword/CountedKeywordFieldMapperTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.countedkeyword; +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; import org.elasticsearch.index.mapper.DocumentMapper; import org.elasticsearch.index.mapper.MappedFieldType; @@ -82,4 +84,15 @@ public void testDottedFieldNames() throws IOException { List<IndexableField> fields = doc.rootDoc().getFields("dotted.field"); assertEquals(1, fields.size()); } + + public void testDisableIndex() throws IOException { + DocumentMapper mapper = createDocumentMapper( + fieldMapping(b -> b.field("type", CountedKeywordFieldMapper.CONTENT_TYPE).field("index", false)) + ); + ParsedDocument doc = mapper.parse(source(b -> b.field("field", "1234"))); + List<IndexableField> fields = doc.rootDoc().getFields("field"); + assertEquals(1, fields.size()); + assertEquals(IndexOptions.NONE, fields.get(0).fieldType().indexOptions()); + assertEquals(DocValuesType.SORTED_SET, fields.get(0).fieldType().docValuesType()); + } }
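Note: the mapper change above swaps a single shared FieldType for two frozen variants chosen by the new index parameter, and testDisableIndex checks that index: false drops postings (IndexOptions.NONE) while keeping SORTED_SET doc values. A compact illustration of the selection logic, with simplified stand-in types rather than Lucene's FieldType:

final class IndexToggleSketch {
    enum IndexOptions { NONE, DOCS }
    record FieldTypeSketch(boolean sortedSetDocValues, IndexOptions indexOptions) {}

    // Two frozen variants, mirroring FIELD_TYPE_INDEXED / FIELD_TYPE_NOT_INDEXED above.
    static final FieldTypeSketch INDEXED = new FieldTypeSketch(true, IndexOptions.DOCS);
    static final FieldTypeSketch NOT_INDEXED = new FieldTypeSketch(true, IndexOptions.NONE);

    // Mirrors Builder.build(): the `index` mapping parameter picks a variant up front
    // instead of mutating one shared static field type.
    static FieldTypeSketch forIndexParam(boolean indexed) {
        return indexed ? INDEXED : NOT_INDEXED;
    }

    public static void main(String[] args) {
        // index: false keeps doc values (so aggregations still work) but drops postings.
        System.out.println(forIndexParam(false)); // FieldTypeSketch[sortedSetDocValues=true, indexOptions=NONE]
    }
}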
diff --git a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java index 43ab090e94381..2f3f9cbf3f32c 100644 --- a/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java +++ b/x-pack/plugin/ml-package-loader/src/main/java/org/elasticsearch/xpack/ml/packageloader/action/ModelLoaderUtils.java @@ -130,9 +130,13 @@ static VocabularyParts loadVocabulary(URI uri) { // visible for testing static VocabularyParts parseVocabParts(InputStream vocabInputStream) throws IOException { - XContentParser sourceParser = XContentType.JSON.xContent() - .createParser(XContentParserConfiguration.EMPTY, Streams.limitStream(vocabInputStream, VOCABULARY_SIZE_LIMIT.getBytes())); - Map<String, List<Object>> vocabParts = sourceParser.map(HashMap::new, XContentParser::list); + Map<String, List<Object>> vocabParts; + try ( + XContentParser sourceParser = XContentType.JSON.xContent() + .createParser(XContentParserConfiguration.EMPTY, Streams.limitStream(vocabInputStream, VOCABULARY_SIZE_LIMIT.getBytes())) + ) { + vocabParts = sourceParser.map(HashMap::new, XContentParser::list); + } List<String> vocabulary = vocabParts.containsKey(VOCABULARY) ? vocabParts.get(VOCABULARY).stream().map(Object::toString).collect(Collectors.toList()) diff --git a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle index 3f2f85e3e09da..64970d18b5c82 100644 --- a/x-pack/plugin/ml/qa/basic-multi-node/build.gradle +++ b/x-pack/plugin/ml/qa/basic-multi-node/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.legacy-java-rest-test' @@ -17,7 +16,6 @@ testClusters.configureEach { setting 'xpack.license.self_generated.type', 'trial' setting 'indices.lifecycle.history_index_enabled', 'false' setting 'slm.history_index_enabled', 'false' - requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } if (BuildParams.inFipsJvm){ diff --git a/x-pack/plugin/ml/qa/ml-with-security/build.gradle b/x-pack/plugin/ml/qa/ml-with-security/build.gradle index df2eb2c687fb5..f2ec17093bb93 100644 --- a/x-pack/plugin/ml/qa/ml-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/ml-with-security/build.gradle @@ -1,4 +1,3 @@ -import org.elasticsearch.gradle.Version apply plugin: 'elasticsearch.legacy-yaml-rest-test' dependencies { @@ -258,5 +257,4 @@ testClusters.configureEach { user username: "no_ml", password: "x-pack-test-password", role: "minimal" setting 'xpack.license.self_generated.type', 'trial' setting 'xpack.security.enabled', 'true' - requiresFeature 'es.learning_to_rank_feature_flag_enabled', Version.fromString("8.12.0") } diff --git a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle index 1cc37f5c4ffc0..9d931974d25d5 100644 --- a/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/ml/qa/multi-cluster-tests-with-security/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -49,16 +50,29 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' + maybeDisableForFips(it) } tasks.register('mixed-cluster', RestIntegTestTask) { dependsOn 'remote-cluster' useCluster remoteCluster systemProperty 'tests.rest.suite', 'multi_cluster' + maybeDisableForFips(it) } tasks.register("integTest") { dependsOn 'mixed-cluster' + maybeDisableForFips(it) }
tasks.named("check").configure { dependsOn("integTest") } + +//TODO: remove with version 8.14. A new FIPS setting was added in 8.13. Since FIPS configures all test clusters and this specific integTest uses +// the previous minor version, that setting is not available when running in FIPS until 8.14. +def maybeDisableForFips(task) { + if (BuildParams.inFipsJvm) { + if(Version.fromString(project.version).before(Version.fromString('8.14.0'))) { + task.enabled = false + } + } +} diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java index 113ed9a5aa686..398ef5f2e743a 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java @@ -6,19 +6,20 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.apache.logging.log4j.Level; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodeHotThreads; -import org.elasticsearch.action.admin.cluster.node.hotthreads.NodesHotThreadsResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.master.AcknowledgedResponse; +import org.elasticsearch.common.ReferenceDocs; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.query.QueryBuilders; +import org.elasticsearch.monitor.jvm.HotThreads; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.AggregationBuilders; @@ -468,11 +469,7 @@ public void testRealtime() throws Exception { StopDatafeedAction.Response stopJobResponse = stopDatafeed(datafeedId); assertTrue(stopJobResponse.isStopped()); } catch (Exception e) { - NodesHotThreadsResponse nodesHotThreadsResponse = clusterAdmin().prepareNodesHotThreads().get(); - int i = 0; - for (NodeHotThreads nodeHotThreads : nodesHotThreadsResponse.getNodes()) { - logger.info(i++ + ":\n" + nodeHotThreads.getHotThreads()); - } + HotThreads.logLocalHotThreads(logger, Level.INFO, "hot threads at failure", ReferenceDocs.LOGGING); throw e; } assertBusy(() -> { @@ -491,11 +488,7 @@ public void testCloseJobStopsRealtimeDatafeed() throws Exception { CloseJobAction.Response closeJobResponse = closeJob(jobId); assertTrue(closeJobResponse.isClosed()); } catch (Exception e) { - NodesHotThreadsResponse nodesHotThreadsResponse = clusterAdmin().prepareNodesHotThreads().get(); - int i = 0; - for (NodeHotThreads nodeHotThreads : nodesHotThreadsResponse.getNodes()) { - logger.info(i++ + ":\n" + nodeHotThreads.getHotThreads()); - } + HotThreads.logLocalHotThreads(logger, Level.INFO, "hot threads at failure", ReferenceDocs.LOGGING); throw e; } assertBusy(() -> { @@ -538,11 +531,7 @@ public void testCloseJobStopsLookbackOnlyDatafeed() throws Exception { CloseJobAction.Response closeJobResponse = closeJob(jobId, useForce); assertTrue(closeJobResponse.isClosed()); } 
catch (Exception e) { - NodesHotThreadsResponse nodesHotThreadsResponse = clusterAdmin().prepareNodesHotThreads().get(); - int i = 0; - for (NodeHotThreads nodeHotThreads : nodesHotThreadsResponse.getNodes()) { - logger.info(i++ + ":\n" + nodeHotThreads.getHotThreads()); - } + HotThreads.logLocalHotThreads(logger, Level.INFO, "hot threads at failure", ReferenceDocs.LOGGING); throw e; } GetDatafeedsStatsAction.Request request = new GetDatafeedsStatsAction.Request(datafeedId); diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java index ecc601b0f1eae..5ce0a24a40d9d 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/ExplainDataFrameAnalyticsIT.java @@ -7,11 +7,12 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.DocWriteRequest; import org.elasticsearch.action.bulk.BulkRequestBuilder; import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.support.RefCountingListener; +import org.elasticsearch.action.support.SubscribableListener; import org.elasticsearch.action.support.WriteRequest; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.index.query.QueryBuilders; @@ -27,11 +28,11 @@ import org.elasticsearch.xpack.core.ml.utils.QueryProvider; import java.io.IOException; -import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.concurrent.atomic.AtomicReference; import java.util.stream.Collectors; import static org.hamcrest.Matchers.contains; @@ -203,26 +204,26 @@ public void testSimultaneousExplainSameConfig() throws IOException { ) .buildForExplain(); - List<ActionFuture<ExplainDataFrameAnalyticsAction.Response>> futures = new ArrayList<>(); - - for (int i = 0; i < simultaneousInvocationCount; ++i) { - futures.add(client().execute(ExplainDataFrameAnalyticsAction.INSTANCE, new ExplainDataFrameAnalyticsAction.Request(config))); - } - - ExplainDataFrameAnalyticsAction.Response previous = null; - for (ActionFuture<ExplainDataFrameAnalyticsAction.Response> future : futures) { - // The main purpose of this test is that actionGet() here will throw an exception - // if any of the simultaneous calls returns an error due to interaction between - // the many estimation processes that get run - ExplainDataFrameAnalyticsAction.Response current = future.actionGet(10000); - if (previous != null) { - // A secondary check the test can perform is that the multiple invocations - // return the same result (but it was failures due to unwanted interactions - // that caused this test to be written) - assertEquals(previous, current); + safeAwait(SubscribableListener.newForked(testListener -> { + try (var listeners = new RefCountingListener(testListener)) { + final var firstResponseRef = new AtomicReference<ExplainDataFrameAnalyticsAction.Response>(); + for (int i = 0; i < simultaneousInvocationCount; ++i) { + client().execute( + ExplainDataFrameAnalyticsAction.INSTANCE, + new ExplainDataFrameAnalyticsAction.Request(config), + // The main purpose of this test is that the action will complete its listener exceptionally if any of the + // simultaneous calls returns an error due to interaction between the many estimation processes that get run. + listeners.acquire(response -> { + // A secondary check the test can perform is that the multiple invocations return the same result + // (but it was failures due to unwanted interactions that caused this test to be written) + assertNotNull(response); + firstResponseRef.compareAndSet(null, response); + assertEquals(firstResponseRef.get(), response); + }) + ); + } } - previous = current; - } + })); } public void testRuntimeFields() { 
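Note: the rewritten test above replaces a list of blocking futures with a fan-out/fan-in: each concurrent call holds a reference on a RefCountingListener, the enclosing SubscribableListener completes once all of them finish (or any fails), and every response is compared against the first one captured. The same shape with JDK types standing in for the ES listener utilities:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Supplier;

final class FanOutSketch {
    // callFactory stands in for client().execute(...); the aggregate future plays
    // the role of the SubscribableListener handed to safeAwait.
    static CompletableFuture<Void> runSimultaneously(int invocations, Supplier<CompletableFuture<String>> callFactory) {
        var firstResponse = new AtomicReference<String>();
        List<CompletableFuture<Void>> inFlight = new ArrayList<>();
        for (int i = 0; i < invocations; i++) {
            inFlight.add(callFactory.get().thenAccept(response -> {
                firstResponse.compareAndSet(null, response);
                // Secondary check: all simultaneous invocations must agree.
                if (firstResponse.get().equals(response) == false) {
                    throw new AssertionError("divergent response: " + response);
                }
            }));
        }
        // Completes when every call has; completes exceptionally if any call failed.
        return CompletableFuture.allOf(inFlight.toArray(new CompletableFuture<?>[0]));
    }

    public static void main(String[] args) {
        runSimultaneously(10, () -> CompletableFuture.supplyAsync(() -> "same-result")).join();
        System.out.println("all invocations agreed");
    }
}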
diff --git a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java index 858c5ba946f78..ecfb2f81bf452 100644 --- a/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java +++ b/x-pack/plugin/ml/qa/native-multi-node-tests/src/javaRestTest/java/org/elasticsearch/xpack/ml/integration/MlNativeIntegTestCase.java @@ -80,6 +80,8 @@ import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsTaskState; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.config.MlFilter; @@ -91,8 +93,6 @@ import org.elasticsearch.xpack.ilm.IndexLifecycle; import org.elasticsearch.xpack.ml.LocalStateMachineLearning; import org.elasticsearch.xpack.ml.autoscaling.MlScalingReason; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.slm.SnapshotLifecycle; import org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry; import org.elasticsearch.xpack.transform.Transform; diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java index 5cf87cff66a25..9b3326a4ba348 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/ChunkedTrainedModelPersisterIT.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsDest; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsSource; import org.elasticsearch.xpack.core.ml.dataframe.analyses.Regression; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinition; import org.elasticsearch.xpack.core.ml.inference.TrainedModelDefinitionTests; @@ -36,7 +37,6 @@ import org.elasticsearch.xpack.ml.extractor.DocValueField; import org.elasticsearch.xpack.ml.extractor.ExtractedField; import 
org.elasticsearch.xpack.ml.extractor.ExtractedFields; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.modelsize.ModelSizeInfo; import org.elasticsearch.xpack.ml.inference.modelsize.ModelSizeInfoTests; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java index 891779e28439b..b4ffe46e6ea92 100644 --- a/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java +++ b/x-pack/plugin/ml/src/internalClusterTest/java/org/elasticsearch/xpack/ml/integration/JobResultsProviderIT.java @@ -63,6 +63,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.TimingStats; +import org.elasticsearch.xpack.core.ml.job.results.Result; import org.elasticsearch.xpack.core.ml.utils.MlIndexAndAlias; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.ml.MlSingleNodeTestCase; @@ -846,6 +847,16 @@ public void testGetSnapshots() { assertNull(snapshots.get(3).getQuantiles()); assertNull(snapshots.get(4).getQuantiles()); + // test get single snapshot + PlainActionFuture> singleFuture = new PlainActionFuture<>(); + jobProvider.getModelSnapshot(jobId, "snap_1", true, singleFuture::onResponse, singleFuture::onFailure); + ModelSnapshot withQuantiles = singleFuture.actionGet().result; + assertThat(withQuantiles.getQuantiles().getTimestamp().getTime(), equalTo(11L)); + + singleFuture = new PlainActionFuture<>(); + jobProvider.getModelSnapshot(jobId, "snap_2", false, singleFuture::onResponse, singleFuture::onFailure); + ModelSnapshot withoutQuantiles = singleFuture.actionGet().result; + assertNull(withoutQuantiles.getQuantiles()); } public void testGetAutodetectParams() throws Exception { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/DefaultMachineLearningExtension.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/DefaultMachineLearningExtension.java index fa94bf96c1167..66f4797ef707c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/DefaultMachineLearningExtension.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/DefaultMachineLearningExtension.java @@ -51,6 +51,11 @@ public boolean isNlpEnabled() { return true; } + @Override + public boolean isLearningToRankEnabled() { + return true; + } + @Override public String[] getAnalyticsDestIndexAllowedSettings() { return ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java index 1031d45facf85..f3254245168b8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearning.java @@ -74,6 +74,7 @@ import org.elasticsearch.plugins.SystemIndexPlugin; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestHandler; +import org.elasticsearch.telemetry.TelemetryProvider; import org.elasticsearch.threadpool.ExecutorBuilder; import 
org.elasticsearch.threadpool.ScalingExecutorBuilder; import org.elasticsearch.threadpool.ThreadPool; @@ -188,6 +189,8 @@ import org.elasticsearch.xpack.core.ml.dataframe.evaluation.MlEvaluationNamedXContentProvider; import org.elasticsearch.xpack.core.ml.dataframe.stats.AnalysisStatsNamedWriteablesProvider; import org.elasticsearch.xpack.core.ml.inference.MlInferenceNamedXContentProvider; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; @@ -318,16 +321,13 @@ import org.elasticsearch.xpack.ml.dataframe.process.NativeMemoryUsageEstimationProcessFactory; import org.elasticsearch.xpack.ml.dataframe.process.results.AnalyticsResult; import org.elasticsearch.xpack.ml.dataframe.process.results.MemoryUsageEstimationResult; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterService; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerBuilder; -import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankRescorerFeature; import org.elasticsearch.xpack.ml.inference.ltr.LearningToRankService; import org.elasticsearch.xpack.ml.inference.modelsize.MlModelSizeNamedXContentProvider; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; @@ -373,6 +373,7 @@ import org.elasticsearch.xpack.ml.process.NativeController; import org.elasticsearch.xpack.ml.process.NativeStorageProvider; import org.elasticsearch.xpack.ml.queries.TextExpansionQueryBuilder; +import org.elasticsearch.xpack.ml.queries.WeightedTokensQueryBuilder; import org.elasticsearch.xpack.ml.rest.RestDeleteExpiredDataAction; import org.elasticsearch.xpack.ml.rest.RestMlInfoAction; import org.elasticsearch.xpack.ml.rest.RestMlMemoryAction; @@ -890,7 +891,7 @@ private static void reportClashingNodeAttribute(String attrName) { @Override public List> getRescorers() { - if (enabled && LearningToRankRescorerFeature.isEnabled()) { + if (enabled && machineLearningExtension.get().isLearningToRankEnabled()) { return List.of( new RescorerSpec<>( LearningToRankRescorerBuilder.NAME, @@ -910,6 +911,7 @@ public Collection createComponents(PluginServices services) { Environment environment = services.environment(); NamedXContentRegistry xContentRegistry = services.xContentRegistry(); IndexNameExpressionResolver indexNameExpressionResolver = services.indexNameExpressionResolver(); + TelemetryProvider telemetryProvider = services.telemetryProvider(); if (enabled == false) { // Holders for @link(MachineLearningFeatureSetUsage) which needs access to job manager and ML extension, @@ -1051,7 +1053,7 @@ public Collection createComponents(PluginServices services) { normalizerProcessFactory = (jobId, quantilesState, bucketSpan, 
executorService) -> new MultiplyingNormalizerProcess(1.0); analyticsProcessFactory = (jobId, analyticsProcessConfig, hasState, executorService, onProcessCrash) -> null; memoryEstimationProcessFactory = (jobId, analyticsProcessConfig, hasState, executorService, onProcessCrash) -> null; - pyTorchProcessFactory = (task, executorService, onProcessCrash) -> new BlackHolePyTorchProcess(); + pyTorchProcessFactory = (task, executorService, afterInputStreamClose, onProcessCrash) -> new BlackHolePyTorchProcess(); } NormalizerFactory normalizerFactory = new NormalizerFactory( normalizerProcessFactory, @@ -1251,6 +1253,14 @@ public Collection createComponents(PluginServices services) { machineLearningExtension.get().isNlpEnabled() ); + MlMetrics mlMetrics = new MlMetrics( + telemetryProvider.getMeterRegistry(), + clusterService, + settings, + autodetectProcessManager, + dataFrameAnalyticsManager + ); + return List.of( mlLifeCycleService, new MlControllerHolder(mlController), @@ -1282,7 +1292,8 @@ public Collection createComponents(PluginServices services) { trainedModelAllocationClusterServiceSetOnce.get(), deploymentManager.get(), nodeAvailabilityZoneMapper, - new MachineLearningExtensionHolder(machineLearningExtension.get()) + new MachineLearningExtensionHolder(machineLearningExtension.get()), + mlMetrics ); } @@ -1720,6 +1731,11 @@ public List> getQueries() { TextExpansionQueryBuilder.NAME, TextExpansionQueryBuilder::new, TextExpansionQueryBuilder::fromXContent + ), + new QuerySpec( + WeightedTokensQueryBuilder.NAME, + WeightedTokensQueryBuilder::new, + WeightedTokensQueryBuilder::fromXContent ) ); } @@ -1801,7 +1817,7 @@ public List getNamedXContent() { ); namedXContent.addAll(new CorrelationNamedContentProvider().getNamedXContentParsers()); // LTR Combine with Inference named content provider when feature flag is removed - if (LearningToRankRescorerFeature.isEnabled()) { + if (machineLearningExtension.get().isLearningToRankEnabled()) { namedXContent.addAll(new MlLTRNamedXContentProvider().getNamedXContentParsers()); } return namedXContent; @@ -1889,7 +1905,7 @@ public List getNamedWriteables() { namedWriteables.addAll(new CorrelationNamedContentProvider().getNamedWriteables()); namedWriteables.addAll(new ChangePointNamedContentProvider().getNamedWriteables()); // LTR Combine with Inference named content provider when feature flag is removed - if (LearningToRankRescorerFeature.isEnabled()) { + if (machineLearningExtension.get().isLearningToRankEnabled()) { namedWriteables.addAll(new MlLTRNamedXContentProvider().getNamedWriteables()); } return namedWriteables; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtension.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtension.java index 552344b4ef10e..c27568c6e3b5c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtension.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MachineLearningExtension.java @@ -25,6 +25,10 @@ default void configure(Settings settings) {} boolean isNlpEnabled(); + default boolean isLearningToRankEnabled() { + return false; + } + String[] getAnalyticsDestIndexAllowedSettings(); AbstractNodeAvailabilityZoneMapper getNodeAvailabilityZoneMapper(Settings settings, ClusterSettings clusterSettings); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java index 
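// A minimal sketch (not part of the patch) of the extension-point shape introduced above:
// LearningToRankRescorerFeature's system-property flag is replaced by a default method on
// MachineLearningExtension that conservatively returns false, with the default extension
// opting in. All names below are hypothetical stand-ins for that arrangement.
import java.util.List;

class ExtensionPointSketch {
    interface FeatureExtension {
        // Conservative default, as in MachineLearningExtension.isLearningToRankEnabled().
        default boolean isFeatureEnabled() {
            return false;
        }
    }

    static class DefaultExtension implements FeatureExtension {
        @Override
        public boolean isFeatureEnabled() {
            return true; // the bundled extension opts in, like DefaultMachineLearningExtension
        }
    }

    // Registration is gated on the extension rather than a system property, mirroring
    // "if (enabled && machineLearningExtension.get().isLearningToRankEnabled())".
    static List<String> rescorers(FeatureExtension extension) {
        return extension.isFeatureEnabled() ? List.of("learning_to_rank") : List.of();
    }
}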
c6a360a018e2a..976e5ec255b85 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlLifeCycleService.java @@ -15,9 +15,9 @@ import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunner; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.process.MlController; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java new file mode 100644 index 0000000000000..f2cedd4bf0f6b --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/MlMetrics.java @@ -0,0 +1,563 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.apache.logging.log4j.LogManager; +import org.apache.logging.log4j.Logger; +import org.elasticsearch.cluster.ClusterChangedEvent; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.ClusterStateListener; +import org.elasticsearch.cluster.node.DiscoveryNode; +import org.elasticsearch.cluster.node.DiscoveryNodeRole; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.component.AbstractLifecycleComponent; +import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.telemetry.metric.LongWithAttributes; +import org.elasticsearch.telemetry.metric.MeterRegistry; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; +import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; +import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.Optional; + +import static org.elasticsearch.xpack.core.ml.MachineLearningField.USE_AUTO_MACHINE_MEMORY_PERCENT; +import static org.elasticsearch.xpack.core.ml.MlTasks.DATAFEED_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.DATA_FRAME_ANALYTICS_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.JOB_SNAPSHOT_UPGRADE_TASK_NAME; +import static org.elasticsearch.xpack.core.ml.MlTasks.JOB_TASK_NAME; +import static 
org.elasticsearch.xpack.ml.MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD; + +/** + * This class adds two types of ML metrics to the meter registry, such that they can be collected by Elastic APM. + * <p>
+ * 1. Per-node ML native memory statistics for ML nodes + * 2. Cluster-wide job/model statuses for master-eligible nodes + * <p>
+ * The memory metrics relate solely to the ML node they are collected from. + * <p>
+ * The job/model metrics are cluster-wide because a key problem we want to be able to detect is when there are + * jobs or models that are not assigned to any node. The consumer of the data needs to account for the fact that + * multiple master-eligible nodes are reporting the same information. The es.ml.is_master attribute in the records + * indicates which one was actually master, so can be used to deduplicate. + */ +public final class MlMetrics extends AbstractLifecycleComponent implements ClusterStateListener { + + private static final Logger logger = LogManager.getLogger(MlMetrics.class); + + private final MeterRegistry meterRegistry; + private final ClusterService clusterService; + private final AutodetectProcessManager autodetectProcessManager; + private final DataFrameAnalyticsManager dataFrameAnalyticsManager; + private final boolean hasMasterRole; + private final boolean hasMlRole; + private final List metrics = new ArrayList<>(); + + private static final Map MASTER_TRUE_MAP = Map.of("es.ml.is_master", Boolean.TRUE); + private static final Map MASTER_FALSE_MAP = Map.of("es.ml.is_master", Boolean.FALSE); + private volatile Map isMasterMap = MASTER_FALSE_MAP; + private volatile boolean firstTime = true; + + private volatile MlTaskStatusCounts mlTaskStatusCounts = MlTaskStatusCounts.EMPTY; + private volatile TrainedModelAllocationCounts trainedModelAllocationCounts = TrainedModelAllocationCounts.EMPTY; + + private volatile long nativeMemLimit; + private volatile long nativeMemAdUsage; + private volatile long nativeMemDfaUsage; + private volatile long nativeMemTrainedModelUsage; + private volatile long nativeMemFree; + + public MlMetrics( + MeterRegistry meterRegistry, + ClusterService clusterService, + Settings settings, + AutodetectProcessManager autodetectProcessManager, + DataFrameAnalyticsManager dataFrameAnalyticsManager + ) { + this.meterRegistry = meterRegistry; + this.clusterService = clusterService; + this.autodetectProcessManager = autodetectProcessManager; + this.dataFrameAnalyticsManager = dataFrameAnalyticsManager; + hasMasterRole = DiscoveryNode.hasRole(settings, DiscoveryNodeRole.MASTER_ROLE); + hasMlRole = DiscoveryNode.hasRole(settings, DiscoveryNodeRole.ML_ROLE); + if (hasMasterRole || hasMlRole) { + clusterService.addListener(this); + } + } + + private void registerMlNodeMetrics(MeterRegistry meterRegistry) { + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.limit", + "ML native memory limit on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemLimit, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.anomaly_detectors", + "ML native memory used by anomaly detection jobs on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemAdUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.data_frame_analytics", + "ML native memory used by data frame analytics jobs on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemDfaUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.usage.trained_models", + "ML native memory used by trained models on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemTrainedModelUsage, Map.of()) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.native_memory.free", + "Free ML native memory on this node.", + "bytes", + () -> new LongWithAttributes(nativeMemFree, Map.of()) + ) + ); + } + + private void 
registerMasterNodeMetrics(MeterRegistry meterRegistry) { + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.opening.count", + "Count of anomaly detection jobs in the opening state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adOpeningCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.opened.count", + "Count of anomaly detection jobs in the opened state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adOpenedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.closing.count", + "Count of anomaly detection jobs in the closing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adClosingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.anomaly_detectors.failed.count", + "Count of anomaly detection jobs in the failed state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.adFailedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.starting.count", + "Count of datafeeds in the starting state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.started.count", + "Count of datafeeds in the started state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStartedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.datafeeds.stopping.count", + "Count of datafeeds in the stopping state cluster-wide.", + "datafeeds", + () -> new LongWithAttributes(mlTaskStatusCounts.datafeedStoppingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.starting.count", + "Count of data frame analytics jobs in the starting state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.started.count", + "Count of data frame analytics jobs in the started state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStartedCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.reindexing.count", + "Count of data frame analytics jobs in the reindexing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaReindexingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.analyzing.count", + "Count of data frame analytics jobs in the analyzing state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaAnalyzingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.stopping.count", + "Count of data frame analytics jobs in the stopping state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaStoppingCount, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.data_frame_analytics.failed.count", + "Count of data frame analytics jobs in the failed state cluster-wide.", + "jobs", + () -> new LongWithAttributes(mlTaskStatusCounts.dfaFailedCount, isMasterMap) + ) + ); + metrics.add( + 
meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.target_allocations.count", + "Sum of target trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsTargetAllocations, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.current_allocations.count", + "Sum of current trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsCurrentAllocations, isMasterMap) + ) + ); + metrics.add( + meterRegistry.registerLongGauge( + "es.ml.trained_models.deployment.failed_allocations.count", + "Sum of failed trained model allocations across all deployments cluster-wide.", + "allocations", + () -> new LongWithAttributes(trainedModelAllocationCounts.trainedModelsFailedAllocations, isMasterMap) + ) + ); + } + + @Override + protected void doStart() { + metrics.clear(); + if (hasMasterRole) { + registerMasterNodeMetrics(meterRegistry); + } + if (hasMlRole) { + registerMlNodeMetrics(meterRegistry); + } + } + + @Override + protected void doStop() {} + + @Override + protected void doClose() { + metrics.forEach(metric -> { + try { + metric.close(); + } catch (Exception e) { + logger.warn("metrics close() method should not throw Exception", e); + } + }); + } + + /** + * Metric values are recalculated in response to cluster state changes and then cached. + * This means that the telemetry provider can poll the metrics registry as often as it + * likes without causing extra work in recalculating the metric values. + */ + @Override + public void clusterChanged(ClusterChangedEvent event) { + isMasterMap = event.localNodeMaster() ? MASTER_TRUE_MAP : MASTER_FALSE_MAP; + + if (event.state().blocks().hasGlobalBlock(GatewayService.STATE_NOT_RECOVERED_BLOCK)) { + // Wait until the gateway has recovered from disk. + return; + } + + boolean mustRecalculateFreeMem = false; + + final ClusterState currentState = event.state(); + final ClusterState previousState = event.previousState(); + + if (firstTime || event.metadataChanged()) { + final PersistentTasksCustomMetadata tasks = currentState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE); + final PersistentTasksCustomMetadata oldTasks = firstTime + ? null + : previousState.getMetadata().custom(PersistentTasksCustomMetadata.TYPE); + if (tasks != null && tasks.equals(oldTasks) == false) { + if (hasMasterRole) { + mlTaskStatusCounts = findTaskStatuses(tasks); + } + if (hasMlRole) { + nativeMemAdUsage = findAdMemoryUsage(autodetectProcessManager); + nativeMemDfaUsage = findDfaMemoryUsage(dataFrameAnalyticsManager, tasks); + mustRecalculateFreeMem = true; + } + } + } + + final TrainedModelAssignmentMetadata currentMetadata = TrainedModelAssignmentMetadata.fromState(currentState); + final TrainedModelAssignmentMetadata previousMetadata = firstTime ? 
null : TrainedModelAssignmentMetadata.fromState(previousState); + if (currentMetadata != null && currentMetadata.equals(previousMetadata) == false) { + if (hasMasterRole) { + trainedModelAllocationCounts = findTrainedModelAllocationCounts(currentMetadata); + } + if (hasMlRole) { + nativeMemTrainedModelUsage = findTrainedModelMemoryUsage(currentMetadata, currentState.nodes().getLocalNode().getId()); + mustRecalculateFreeMem = true; + } + } + + if (firstTime) { + firstTime = false; + nativeMemLimit = findNativeMemoryLimit(currentState.nodes().getLocalNode(), clusterService.getClusterSettings()); + mustRecalculateFreeMem = true; + // Install a listener to recalculate limit and free in response to settings changes. + // This isn't done in the constructor, but instead only after the three usage variables + // have been populated. Doing this means that immediately after startup, when the stats + // are inaccurate, they'll _all_ be zero. Installing the settings listeners immediately + // could mean that free would be misleadingly set based on zero usage when actual usage + // is _not_ zero. + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(USE_AUTO_MACHINE_MEMORY_PERCENT, s -> memoryLimitClusterSettingUpdated()); + clusterService.getClusterSettings() + .addSettingsUpdateConsumer(MachineLearning.MAX_MACHINE_MEMORY_PERCENT, s -> memoryLimitClusterSettingUpdated()); + } + + if (mustRecalculateFreeMem) { + nativeMemFree = findNativeMemoryFree(nativeMemLimit, nativeMemAdUsage, nativeMemDfaUsage, nativeMemTrainedModelUsage); + } + } + + /** + * This method is registered to be called whenever a cluster setting is changed that affects + * any of the calculations this class performs. + */ + private void memoryLimitClusterSettingUpdated() { + nativeMemLimit = findNativeMemoryLimit(clusterService.localNode(), clusterService.getClusterSettings()); + nativeMemFree = findNativeMemoryFree(nativeMemLimit, nativeMemAdUsage, nativeMemDfaUsage, nativeMemTrainedModelUsage); + } + + /** + * Returns up-to-date stats about the states of the ML entities that are persistent tasks. + * Currently this includes: + * - Anomaly detection jobs + * - Datafeeds + * - Data frame analytics jobs + *
<p>
+ * In the future it could possibly also include model snapshot upgrade tasks. + * <p>
+ * These stats relate to the whole cluster and not just the current node. + * <p>
+ * The caller is expected to cache the returned stats to avoid unnecessary recalculation. + */ + static MlTaskStatusCounts findTaskStatuses(PersistentTasksCustomMetadata tasks) { + + int adOpeningCount = 0; + int adOpenedCount = 0; + int adClosingCount = 0; + int adFailedCount = 0; + int datafeedStartingCount = 0; + int datafeedStartedCount = 0; + int datafeedStoppingCount = 0; + int dfaStartingCount = 0; + int dfaStartedCount = 0; + int dfaReindexingCount = 0; + int dfaAnalyzingCount = 0; + int dfaStoppingCount = 0; + int dfaFailedCount = 0; + + for (PersistentTasksCustomMetadata.PersistentTask task : tasks.tasks()) { + switch (task.getTaskName()) { + case JOB_TASK_NAME: + switch (MlTasks.getJobStateModifiedForReassignments(task)) { + case OPENING -> ++adOpeningCount; + case OPENED -> ++adOpenedCount; + case CLOSING -> ++adClosingCount; + case FAILED -> ++adFailedCount; + } + break; + case DATAFEED_TASK_NAME: + switch (MlTasks.getDatafeedState(task)) { + case STARTING -> ++datafeedStartingCount; + case STARTED -> ++datafeedStartedCount; + case STOPPING -> ++datafeedStoppingCount; + } + break; + case DATA_FRAME_ANALYTICS_TASK_NAME: + switch (MlTasks.getDataFrameAnalyticsState(task)) { + case STARTING -> ++dfaStartingCount; + case STARTED -> ++dfaStartedCount; + case REINDEXING -> ++dfaReindexingCount; + case ANALYZING -> ++dfaAnalyzingCount; + case STOPPING -> ++dfaStoppingCount; + case FAILED -> ++dfaFailedCount; + } + break; + case JOB_SNAPSHOT_UPGRADE_TASK_NAME: + // Not currently tracked + // TODO: consider in the future, especially when we're at the stage of needing to upgrade serverless model snapshots + break; + } + } + + return new MlTaskStatusCounts( + adOpeningCount, + adOpenedCount, + adClosingCount, + adFailedCount, + datafeedStartingCount, + datafeedStartedCount, + datafeedStoppingCount, + dfaStartingCount, + dfaStartedCount, + dfaReindexingCount, + dfaAnalyzingCount, + dfaStoppingCount, + dfaFailedCount + ); + } + + /** + * Return the memory usage, in bytes, of the anomaly detection jobs that are running on the + * current node. + */ + static long findAdMemoryUsage(AutodetectProcessManager autodetectProcessManager) { + return autodetectProcessManager.getOpenProcessMemoryUsage().getBytes(); + } + + /** + * Return the memory usage, in bytes, of the data frame analytics jobs that are running on the + * current node. + */ + static long findDfaMemoryUsage(DataFrameAnalyticsManager dataFrameAnalyticsManager, PersistentTasksCustomMetadata tasks) { + return dataFrameAnalyticsManager.getActiveTaskMemoryUsage(tasks).getBytes(); + } + + /** + * Returns up-to-date stats about the numbers of allocations of ML trained models. + *
<p>
+ * These stats relate to the whole cluster and not just the current node. + * <p>
+ * The caller is expected to cache the returned stats to avoid unnecessary recalculation. + */ + static TrainedModelAllocationCounts findTrainedModelAllocationCounts(TrainedModelAssignmentMetadata metadata) { + int trainedModelsTargetAllocations = 0; + int trainedModelsCurrentAllocations = 0; + int trainedModelsFailedAllocations = 0; + + for (TrainedModelAssignment trainedModelAssignment : metadata.allAssignments().values()) { + trainedModelsTargetAllocations += trainedModelAssignment.totalTargetAllocations(); + trainedModelsCurrentAllocations += trainedModelAssignment.totalCurrentAllocations(); + trainedModelsFailedAllocations += trainedModelAssignment.totalFailedAllocations(); + } + + return new TrainedModelAllocationCounts( + trainedModelsTargetAllocations, + trainedModelsCurrentAllocations, + trainedModelsFailedAllocations + ); + } + + /** + * Return the memory usage, in bytes, of the trained models that are running on the + * current node. + */ + static long findTrainedModelMemoryUsage(TrainedModelAssignmentMetadata metadata, String localNodeId) { + long trainedModelMemoryUsageBytes = 0; + for (TrainedModelAssignment assignment : metadata.allAssignments().values()) { + if (Optional.ofNullable(assignment.getNodeRoutingTable().get(localNodeId)) + .map(RoutingInfo::getState) + .orElse(RoutingState.STOPPED) + .consumesMemory()) { + trainedModelMemoryUsageBytes += assignment.getTaskParams().estimateMemoryUsageBytes(); + } + } + return trainedModelMemoryUsageBytes; + } + + /** + * Return the maximum amount of memory, in bytes, permitted for ML processes running on the + * current node. + */ + static long findNativeMemoryLimit(DiscoveryNode localNode, ClusterSettings settings) { + return NativeMemoryCalculator.allowedBytesForMl(localNode, settings).orElse(0L); + } + + /** + * Return the amount of free memory, in bytes, that remains available for ML processes running on the + * current node. 
+ */ + static long findNativeMemoryFree(long nativeMemLimit, long nativeMemAdUsage, long nativeMemDfaUsage, long nativeMemTrainedModelUsage) { + long totalUsage = nativeMemAdUsage + nativeMemDfaUsage + nativeMemTrainedModelUsage; + if (totalUsage > 0) { + totalUsage += NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes(); + } + return nativeMemLimit - totalUsage; + } + + record MlTaskStatusCounts( + int adOpeningCount, + int adOpenedCount, + int adClosingCount, + int adFailedCount, + int datafeedStartingCount, + int datafeedStartedCount, + int datafeedStoppingCount, + int dfaStartingCount, + int dfaStartedCount, + int dfaReindexingCount, + int dfaAnalyzingCount, + int dfaStoppingCount, + int dfaFailedCount + ) { + static final MlTaskStatusCounts EMPTY = new MlTaskStatusCounts(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0); + } + + record TrainedModelAllocationCounts( + int trainedModelsTargetAllocations, + int trainedModelsCurrentAllocations, + int trainedModelsFailedAllocations + ) { + static final TrainedModelAllocationCounts EMPTY = new TrainedModelAllocationCounts(0, 0, 0); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java index 44235882a6582..5ecd0322674e1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportClearDeploymentCacheAction.java @@ -23,8 +23,8 @@ import org.elasticsearch.xpack.core.ml.action.ClearDeploymentCacheAction.Request; import org.elasticsearch.xpack.core.ml.action.ClearDeploymentCacheAction.Response; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import java.util.List; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java index 7442f1db0a662..9c368c1a162a8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportCoordinatedInferenceAction.java @@ -28,9 +28,9 @@ import org.elasticsearch.xpack.core.inference.action.InferenceAction; import org.elasticsearch.xpack.core.ml.action.CoordinatedInferenceAction; import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.EmptyConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfigUpdate; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils; import java.util.ArrayList; import java.util.function.Supplier; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java index 
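// A sketch (not part of the patch) of the free-memory arithmetic in findNativeMemoryFree
// above, which backs the cached es.ml.native_memory.free gauge: usage by anomaly
// detection, data frame analytics and trained models is summed, the shared native code
// overhead is added only while at least one native process is running, and the result is
// subtracted from the node's limit. The 30 MB overhead below is an illustrative
// assumption; the real value is MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD.
class NativeMemorySketch {
    static final long CODE_OVERHEAD_BYTES = 30L * 1024 * 1024; // assumed for illustration

    static long free(long limit, long adUsage, long dfaUsage, long trainedModelUsage) {
        long totalUsage = adUsage + dfaUsage + trainedModelUsage;
        if (totalUsage > 0) {
            // Paid once per node, and only while some native process is alive.
            totalUsage += CODE_OVERHEAD_BYTES;
        }
        return limit - totalUsage;
    }

    public static void main(String[] args) {
        // A 4 GiB limit with one 1 GiB anomaly detection job leaves 3 GiB minus overhead.
        System.out.println(free(4L << 30, 1L << 30, 0, 0));
    }
}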
093e4213a5db1..49f73056cd8bd 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAction.java @@ -41,9 +41,9 @@ import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.action.DeleteTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java index 73601ef86ff13..fe8a4ff029d69 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportDeleteTrainedModelAliasAction.java @@ -32,7 +32,7 @@ import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.ml.action.DeleteTrainedModelAliasAction; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; import java.util.HashMap; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java index 36d225a943348..14afd6999b0c0 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetDeploymentStatsAction.java @@ -28,7 +28,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.ModelStats; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java index e6d1fe30d7646..78d030d454f0b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsAction.java @@ -21,11 
+21,11 @@ import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Request; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction.Response; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import java.util.Collections; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java index 3c9ba3700dc8e..76321608ba4fb 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsAction.java @@ -43,16 +43,16 @@ import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsStatsAction; import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentStats; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceStats; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.TrainedModelSizeStats; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java index 3cf0189c28df2..6a8dca8e2776b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportInternalInferModelAction.java @@ -32,15 +32,15 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction.Request; import org.elasticsearch.xpack.core.ml.action.InferModelAction.Response; import org.elasticsearch.xpack.core.ml.action.InferTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import 
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.results.ErrorInferenceResults; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.loadingservice.LocalModel; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java index 7462b6cd918aa..5206799735c52 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAction.java @@ -60,8 +60,10 @@ import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction.Request; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAction.Response; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.LenientlyParsedInferenceConfig; @@ -72,8 +74,6 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.utils.TaskRetriever; @@ -289,7 +289,7 @@ protected void masterOperation( .execute(ActionListener.wrap(stats -> { IndexStats indexStats = stats.getIndices().get(InferenceIndexConstants.nativeDefinitionStore()); if (indexStats != null - && indexStats.getTotal().getStore().getSizeInBytes() > MAX_NATIVE_DEFINITION_INDEX_SIZE.getBytes()) { + && indexStats.getTotal().getStore().sizeInBytes() > MAX_NATIVE_DEFINITION_INDEX_SIZE.getBytes()) { finalResponseListener.onFailure( new ElasticsearchStatusException( "Native model store has exceeded the maximum acceptable size of {}, " @@ -583,24 +583,27 @@ static InferenceConfig parseInferenceConfigFromModelPackage( NamedXContentRegistry namedXContentRegistry, DeprecationHandler deprecationHandler ) throws IOException { - XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); - XContentParser sourceParser = XContentType.JSON.xContent() - .createParser( - 
XContentParserConfiguration.EMPTY.withRegistry(namedXContentRegistry).withDeprecationHandler(deprecationHandler), - BytesReference.bytes(xContentBuilder).streamInput() - ); - - XContentParser.Token token = sourceParser.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - token = sourceParser.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; - String currentName = sourceParser.currentName(); - - InferenceConfig inferenceConfig = sourceParser.namedObject(LenientlyParsedInferenceConfig.class, currentName, null); - // consume the end object token - token = sourceParser.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - return inferenceConfig; + try ( + XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().map(source); + XContentParser sourceParser = XContentType.JSON.xContent() + .createParser( + XContentParserConfiguration.EMPTY.withRegistry(namedXContentRegistry).withDeprecationHandler(deprecationHandler), + BytesReference.bytes(xContentBuilder).streamInput() + ) + ) { + + XContentParser.Token token = sourceParser.nextToken(); + assert token == XContentParser.Token.START_OBJECT; + token = sourceParser.nextToken(); + assert token == XContentParser.Token.FIELD_NAME; + String currentName = sourceParser.currentName(); + + InferenceConfig inferenceConfig = sourceParser.namedObject(LenientlyParsedInferenceConfig.class, currentName, null); + // consume the end object token + token = sourceParser.nextToken(); + assert token == XContentParser.Token.END_OBJECT; + return inferenceConfig; + } } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java index de760d8fa17ed..79560b8b8e94e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportPutTrainedModelAliasAction.java @@ -36,14 +36,14 @@ import org.elasticsearch.xpack.core.ml.MachineLearningField; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.action.PutTrainedModelAliasAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java index 5450b2752ab97..c01c1f46b3d13 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java +++ 
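// A plain-JDK sketch (not part of the patch) of the resource-safety fix applied to
// parseInferenceConfigFromModelPackage above: both the builder and the parser are
// AutoCloseable, and declaring them in a single try-with-resources header guarantees
// they are closed, in reverse order, even when parsing throws mid-stream. The reader
// pair below is a hypothetical stand-in for the XContentBuilder/XContentParser pair.
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

class TryWithResourcesSketch {
    static String firstLine(String json) throws IOException {
        try (StringReader source = new StringReader(json); BufferedReader parser = new BufferedReader(source)) {
            // Before such a fix, an exception thrown here would leak both resources.
            return parser.readLine();
        }
    }
}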
b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportRevertModelSnapshotAction.java @@ -272,7 +272,7 @@ private static void getModelSnapshot( return; } - provider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { + provider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), true, modelSnapshot -> { if (modelSnapshot == null) { throw missingSnapshotException(request); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java index 2cb8fc847bb62..7c52e086ec43c 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportSetResetModeAction.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.core.action.SetResetModeActionRequest; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.SetResetModeAction; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; public class TransportSetResetModeAction extends AbstractTransportSetResetModeAction { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java index 4a569b374582a..ecfe4c8aac6c6 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStartTrainedModelDeploymentAction.java @@ -56,13 +56,13 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.IndexLocation; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.TransportVersionUtils; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentService; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelDefinitionDoc; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java index 6e90d097d1e9f..5b2c3fdeddf43 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportStopTrainedModelDeploymentAction.java @@ -35,10 +35,10 @@ import org.elasticsearch.transport.TransportService; import 
org.elasticsearch.xpack.core.ml.action.StopTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentClusterService; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import org.elasticsearch.xpack.ml.notifications.InferenceAuditor; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java index f0872bccc8378..097be745996ab 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpdateModelSnapshotAction.java @@ -71,7 +71,8 @@ protected void doExecute( ActionListener listener ) { logger.debug("Received request to update model snapshot [{}] for job [{}]", request.getSnapshotId(), request.getJobId()); - jobResultsProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), modelSnapshot -> { + // Even though the quantiles can be large we have to fetch them initially so that the updated document is complete + jobResultsProvider.getModelSnapshot(request.getJobId(), request.getSnapshotId(), true, modelSnapshot -> { if (modelSnapshot == null) { listener.onFailure( new ResourceNotFoundException( @@ -81,8 +82,7 @@ protected void doExecute( } else { Result updatedSnapshot = applyUpdate(request, modelSnapshot); indexModelSnapshot(updatedSnapshot, b -> { - // The quantiles can be large, and totally dominate the output - - // it's clearer to remove them + // The quantiles can be large, and totally dominate the output - it's clearer to remove them at this stage listener.onResponse( new UpdateModelSnapshotAction.Response(new ModelSnapshot.Builder(updatedSnapshot.result).setQuantiles(null).build()) ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java index 3f6193c124a9a..15c1d53f7bdf8 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportUpgradeJobModelSnapshotAction.java @@ -223,6 +223,7 @@ protected void masterOperation(Task task, Request request, ClusterState state, A jobResultsProvider.getModelSnapshot( request.getJobId(), request.getSnapshotId(), + false, getSnapshotHandler::onResponse, getSnapshotHandler::onFailure ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java index 57d0084065fa5..cca59f27d5c76 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlAutoscalingContext.java @@ -17,11 +17,11 @@ import 
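// A sketch (not part of the patch) of the fetch-complete, respond-slim pattern in
// TransportUpdateModelSnapshotAction above: the snapshot is fetched with its large
// quantiles included so the updated document stays complete, but the copy returned to
// the caller drops them, mirroring new ModelSnapshot.Builder(result).setQuantiles(null)
// .build(). The Snapshot record and persist() below are hypothetical stand-ins.
class SnapshotResponseSketch {
    record Snapshot(String id, byte[] quantiles) {
        Snapshot withoutQuantiles() {
            return new Snapshot(id, null);
        }
    }

    static Snapshot updateAndRespond(Snapshot fetched) {
        persist(fetched);                  // the stored document keeps the quantiles
        return fetched.withoutQuantiles(); // the API response stays small
    }

    static void persist(Snapshot snapshot) {
        // index the full document, including quantiles
    }
}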
org.elasticsearch.xpack.core.ml.inference.assignment.AllocationStatus; import org.elasticsearch.xpack.core.ml.inference.assignment.AssignmentState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import java.util.Collection; import java.util.List; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java index 5605a80a7454c..44cf1188b09a2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDecider.java @@ -17,7 +17,7 @@ import org.elasticsearch.xpack.autoscaling.capacity.AutoscalingDeciderContext; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.utils.MlProcessors; import java.time.Instant; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java index 829101b3bd551..223154737df3f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/DataFrameAnalyticsManager.java @@ -20,12 +20,16 @@ import org.elasticsearch.cluster.metadata.MappingMetadata; import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.IndexNotFoundException; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; import org.elasticsearch.xpack.core.ml.MlStatsIndex; +import org.elasticsearch.xpack.core.ml.MlTasks; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndex; import org.elasticsearch.xpack.core.ml.job.persistence.ElasticsearchMappings; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -45,7 +49,10 @@ import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; import org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; +import java.util.Map; import java.util.Objects; +import java.util.Optional; +import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import static 
org.elasticsearch.core.Strings.format; @@ -72,6 +79,8 @@ public class DataFrameAnalyticsManager { /** Indicates whether the node is shutting down. */ private final AtomicBoolean nodeShuttingDown = new AtomicBoolean(); + private final Map<String, ByteSizeValue> memoryLimitById; + public DataFrameAnalyticsManager( Settings settings, NodeClient client, @@ -84,6 +93,37 @@ public DataFrameAnalyticsManager( ResultsPersisterService resultsPersisterService, ModelLoadingService modelLoadingService, String[] destIndexAllowedSettings + ) { + this( + settings, + client, + threadPool, + clusterService, + configProvider, + processManager, + auditor, + expressionResolver, + resultsPersisterService, + modelLoadingService, + destIndexAllowedSettings, + new ConcurrentHashMap<>() + ); + } + + // For testing only + public DataFrameAnalyticsManager( + Settings settings, + NodeClient client, + ThreadPool threadPool, + ClusterService clusterService, + DataFrameAnalyticsConfigProvider configProvider, + AnalyticsProcessManager processManager, + DataFrameAnalyticsAuditor auditor, + IndexNameExpressionResolver expressionResolver, + ResultsPersisterService resultsPersisterService, + ModelLoadingService modelLoadingService, + String[] destIndexAllowedSettings, + Map<String, ByteSizeValue> memoryLimitById ) { this.settings = Objects.requireNonNull(settings); this.client = Objects.requireNonNull(client); @@ -96,11 +136,13 @@ public DataFrameAnalyticsManager( this.resultsPersisterService = Objects.requireNonNull(resultsPersisterService); this.modelLoadingService = Objects.requireNonNull(modelLoadingService); this.destIndexAllowedSettings = Objects.requireNonNull(destIndexAllowedSettings); + this.memoryLimitById = Objects.requireNonNull(memoryLimitById); } public void execute(DataFrameAnalyticsTask task, ClusterState clusterState, TimeValue masterNodeTimeout) { // With config in hand, determine action to take ActionListener<DataFrameAnalyticsConfig> configListener = ActionListener.wrap(config -> { + memoryLimitById.put(config.getId(), config.getModelMemoryLimit()); // Check if existing destination index is incompatible. // If it is, we delete it and start from reindexing. IndexMetadata destIndex = clusterState.getMetadata().index(config.getDest().getIndex()); @@ -224,6 +266,7 @@ private void executeStep(DataFrameAnalyticsTask task, DataFrameAnalyticsConfig c case FINAL -> { LOGGER.info("[{}] Marking task completed", config.getId()); task.markAsCompleted(); + memoryLimitById.remove(config.getId()); } default -> task.markAsFailed(ExceptionsHelper.serverError("Unknown step [{}]", step)); } @@ -291,4 +334,34 @@ public boolean isNodeShuttingDown() { public void markNodeAsShuttingDown() { nodeShuttingDown.set(true); } + + /** + * Get the memory limit for a data frame analytics job if known. + * The memory limit will only be known if the job is running on the + * current node, or has been very recently. + * @param id Data frame analytics job ID. + * @return The {@link ByteSizeValue} representing the memory limit, if known, otherwise {@link Optional#empty}. + */ + public Optional<ByteSizeValue> getMemoryLimitIfKnown(String id) { + return Optional.ofNullable(memoryLimitById.get(id)); + } + + /** + * Finds the memory used by data frame analytics jobs that are active on the current node. + * This includes jobs that are in the reindexing state, even though they don't have a running + * process, because we want to ensure that when they get as far as needing to run a process + * there'll be space for it. + * @param tasks Persistent tasks metadata.
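The accounting in `getActiveTaskMemoryUsage` is limit-plus-overhead for every tracked job whose persistent-task state still consumes memory. A compact sketch of that summation, with a plain map and a hypothetical `consumesMemory` predicate standing in for the real `MlTasks` state lookup (the overhead constant here is an assumption, not quoted from the source):

```java
import java.util.Map;
import java.util.function.Predicate;

final class ActiveMemory {
    // Assumed stand-in for DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.
    static final long PROCESS_OVERHEAD_BYTES = 5L * 1024 * 1024;

    // jobId -> configured memory limit in bytes
    static long activeTaskMemoryBytes(Map<String, Long> limitById, Predicate<String> consumesMemory) {
        long total = 0;
        for (Map.Entry<String, Long> e : limitById.entrySet()) {
            if (consumesMemory.test(e.getKey())) {
                // Each active job accounts for its configured limit plus fixed process overhead.
                total += e.getValue() + PROCESS_OVERHEAD_BYTES;
            }
        }
        return total;
    }
}
```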
+ * @return Memory used by data frame analytics jobs that are active on the current node. + */ + public ByteSizeValue getActiveTaskMemoryUsage(PersistentTasksCustomMetadata tasks) { + long memoryUsedBytes = 0; + for (Map.Entry entry : memoryLimitById.entrySet()) { + DataFrameAnalyticsState state = MlTasks.getDataFrameAnalyticsState(entry.getKey(), tasks); + if (state.consumesMemory()) { + memoryUsedBytes += entry.getValue().getBytes() + DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes(); + } + } + return ByteSizeValue.ofBytes(memoryUsedBytes); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java index 618cbc075bd99..68dc2bf496a15 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/dataframe/persistence/DataFrameAnalyticsConfigProvider.java @@ -176,14 +176,12 @@ public void update( // Parse the original config DataFrameAnalyticsConfig originalConfig; - try { - try ( - InputStream stream = getResponse.getSourceAsBytesRef().streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream) - ) { - originalConfig = DataFrameAnalyticsConfig.LENIENT_PARSER.apply(parser, null).build(); - } + try ( + InputStream stream = getResponse.getSourceAsBytesRef().streamInput(); + XContentParser parser = XContentFactory.xContent(XContentType.JSON) + .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, stream) + ) { + originalConfig = DataFrameAnalyticsConfig.LENIENT_PARSER.apply(parser, null).build(); } catch (IOException e) { listener.onFailure(new ElasticsearchParseException("Failed to parse data frame analytics configuration [" + id + "]", e)); return; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java index fe4462d6556ee..471615e8bbd6a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterService.java @@ -44,6 +44,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.MlPlatformArchitecturesUtil; @@ -67,8 +68,8 @@ import java.util.stream.Collectors; import static org.elasticsearch.core.Strings.format; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.NODES_CHANGED_REASON; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.createShuttingDownRoute; +import static 
org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.NODES_CHANGED_REASON; +import static org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.createShuttingDownRoute; public class TrainedModelAssignmentClusterService implements ClusterStateListener { @@ -1099,31 +1100,61 @@ static boolean haveMlNodesChanged(ClusterChangedEvent event, TrainedModelAssignm // it may get re-allocated to that node when another node is added/removed... boolean nodesShutdownChanged = event.changedCustomMetadataSet().contains(NodesShutdownMetadata.TYPE); if (event.nodesChanged() || nodesShutdownChanged) { + // This identity is only used to correlate the various log messages that this method prints, + // so that we can reasonably assume they are all related. + // If the log messages printed from this method get interleaved across nodes it can make debugging difficult. + var eventIdentity = Long.toHexString(System.nanoTime()); + Set<String> shuttingDownNodes = nodesShuttingDown(event.state()); DiscoveryNodes.Delta nodesDelta = event.nodesDelta(); Set<String> removedNodes = nodesDelta.removedNodes().stream().map(DiscoveryNode::getId).collect(Collectors.toSet()); Set<String> addedNodes = nodesDelta.addedNodes().stream().map(DiscoveryNode::getId).collect(Collectors.toSet()); + logger.debug( + () -> format( + "Initial node change info; identity: %s; removed nodes: %s; added nodes: %s; shutting down nodes: %s", + eventIdentity, + removedNodes, + addedNodes, + shuttingDownNodes + ) + ); + Set<String> exitingShutDownNodes; if (nodesShutdownChanged) { Set<String> previousShuttingDownNodes = nodesShuttingDown(event.previousState()); + Set<String> presentNodes = event.state().nodes().stream().map(DiscoveryNode::getId).collect(Collectors.toSet()); // Add nodes that were marked for shutdown in the previous state // but are no longer marked as shutdown in the current state.
- Set<String> returningShutDownNodes = Sets.difference(previousShuttingDownNodes, shuttingDownNodes); + // The intersection ensures we only include nodes that actually exist + Set<String> returningShutDownNodes = Sets.intersection( + presentNodes, + Sets.difference(previousShuttingDownNodes, shuttingDownNodes) + ); addedNodes.addAll(returningShutDownNodes); // and nodes that are marked for shutdown in this event only exitingShutDownNodes = Sets.difference(shuttingDownNodes, previousShuttingDownNodes); removedNodes.addAll(exitingShutDownNodes); + + logger.debug( + () -> format( + "Shutting down nodes were changed; identity: %s; previous shutting down nodes: %s; returning nodes: %s", + eventIdentity, + previousShuttingDownNodes, + returningShutDownNodes + ) + ); } else { exitingShutDownNodes = Collections.emptySet(); } logger.debug( () -> format( - "added nodes %s; removed nodes %s; shutting down nodes %s; exiting shutdown nodes %s", + "identity: %s; added nodes %s; removed nodes %s; shutting down nodes %s; exiting shutdown nodes %s", + eventIdentity, addedNodes, removedNodes, shuttingDownNodes, diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java index fdb007862cfdc..3fac7c387b12e 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeService.java @@ -41,6 +41,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.InferenceConfig; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; @@ -66,8 +67,8 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.xpack.core.ml.MlTasks.TRAINED_MODEL_ASSIGNMENT_TASK_ACTION; import static org.elasticsearch.xpack.core.ml.MlTasks.TRAINED_MODEL_ASSIGNMENT_TASK_TYPE; +import static org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentUtils.NODE_IS_SHUTTING_DOWN; import static org.elasticsearch.xpack.ml.MachineLearning.ML_PYTORCH_MODEL_INFERENCE_FEATURE; -import static org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentUtils.NODE_IS_SHUTTING_DOWN; public class TrainedModelAssignmentNodeService implements ClusterStateListener { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java index 6e6b447fcea3d..a1142796558f4 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancer.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlan; import org.elasticsearch.xpack.ml.inference.assignment.planning.AssignmentPlanner; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java index 1a5b5481704a4..0609e0e6ff916 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentService.java @@ -33,6 +33,7 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.action.UpdateTrainedModelAssignmentRoutingInfoAction; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import java.util.Objects; import java.util.function.Predicate; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java index f48e67f377817..ef5de2718e702 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/deployment/DeploymentManager.java @@ -496,7 +496,14 @@ synchronized void startAndLoad(TrainedModelLocation modelLocation, ActionListene } logger.debug("[{}] start and load", task.getDeploymentId()); - process.set(pyTorchProcessFactory.createProcess(task, executorServiceForProcess, this::onProcessCrash)); + process.set( + pyTorchProcessFactory.createProcess( + task, + executorServiceForProcess, + () -> resultProcessor.awaitCompletion(COMPLETION_TIMEOUT.getMinutes(), TimeUnit.MINUTES), + this::onProcessCrash + ) + ); startTime = Instant.now(); logger.debug("[{}] process started", task.getDeploymentId()); try { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java index e9b7a1a3e137b..5994c61f46297 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingService.java @@ -37,6 +37,7 @@ import org.elasticsearch.tasks.TaskId; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; @@ -46,7 +47,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinition; 
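The extra factory argument visible in the `DeploymentManager` hunk above threads a shutdown barrier into process teardown: after the native process's input stream is closed, the supplied callback blocks until the result processor drains, or gives up with a `TimeoutException`. A sketch of the callback shape and its wiring, with hypothetical simplified names:

```java
import java.util.concurrent.TimeoutException;

// Mirrors PyTorchProcessFactory.TimeoutRunnable from this change: a callback
// that is allowed to block and to fail with a TimeoutException.
interface TimeoutRunnable {
    void run() throws TimeoutException;
}

final class FactorySketch {
    // Hypothetical simplified factory shape; the real signature also takes
    // the deployment task, executor service, and crash handler.
    static void createProcess(TimeoutRunnable afterInStreamClose) throws TimeoutException {
        // ... start the native process ...
        // On shutdown, after closing the process input stream:
        afterInStreamClose.run();
    }

    public static void main(String[] args) throws TimeoutException {
        // As wired in DeploymentManager: wait for the result processor to drain.
        createProcess(() -> { /* resultProcessor.awaitCompletion(timeout, unit) */ });
    }
}
```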
import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java deleted file mode 100644 index 42598691beec2..0000000000000 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankRescorerFeature.java +++ /dev/null @@ -1,28 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.ml.inference.ltr; - -import org.elasticsearch.common.util.FeatureFlag; - -/** - * Learning to rank feature flag. When the feature is complete, this flag will be removed. - * - * Upon removal, ensure transport serialization is all corrected for future BWC. - * - * See {@link LearningToRankRescorerBuilder} - */ -public class LearningToRankRescorerFeature { - - private LearningToRankRescorerFeature() {} - - private static final FeatureFlag LEARNING_TO_RANK = new FeatureFlag("learning_to_rank"); - - public static boolean isEnabled() { - return LEARNING_TO_RANK.isEnabled(); - } -} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java index 177099801e0a5..bec162d141eba 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/ltr/LearningToRankService.java @@ -189,13 +189,13 @@ private QueryExtractorBuilder applyParams(QueryExtractorBuilder queryExtractorBu try { Script script = new Script(ScriptType.INLINE, DEFAULT_TEMPLATE_LANG, templateSource, SCRIPT_OPTIONS, Collections.emptyMap()); String parsedTemplate = scriptService.compile(script, TemplateScript.CONTEXT).newInstance(params).execute(); - XContentParser parser = XContentType.JSON.xContent().createParser(parserConfiguration, parsedTemplate); - - return new QueryExtractorBuilder( - queryExtractorBuilder.featureName(), - QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), - queryExtractorBuilder.defaultScore() - ); + try (XContentParser parser = XContentType.JSON.xContent().createParser(parserConfiguration, parsedTemplate)) { + return new QueryExtractorBuilder( + queryExtractorBuilder.featureName(), + QueryProvider.fromXContent(parser, false, INFERENCE_CONFIG_QUERY_BAD_FORMAT), + queryExtractorBuilder.defaultScore() + ); + } } catch (GeneralScriptException e) { if (e.getRootCause().getClass().getName().equals(MustacheInvalidParameterException.class.getName())) { // Can't use instanceof since it returns an unexpected result.
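For context, that `LearningToRankService` hunk compiles an inline mustache template, renders it with the caller's params, and parses the rendered JSON into a query, with the parser now held in try-with-resources so it is always released. A toy rendering step in the same spirit (the string replacement is an illustrative stand-in for the real mustache engine):

```java
import java.util.Map;

final class TemplateSketch {
    // Illustrative stand-in for scriptService.compile(...).newInstance(params).execute()
    static String render(String templateSource, Map<String, Object> params) {
        String out = templateSource;
        for (Map.Entry<String, Object> e : params.entrySet()) {
            out = out.replace("{{" + e.getKey() + "}}", String.valueOf(e.getValue()));
        }
        return out;
    }

    public static void main(String[] args) {
        String template = "{\"match\":{\"title\":\"{{query_text}}\"}}";
        // Renders to {"match":{"title":"elastic"}}, which the service then
        // parses into a query inside a try-with-resources block.
        System.out.println(render(template, Map.of("query_text", "elastic")));
    }
}
```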
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java index 2be4fe12884b0..c903933a8e350 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/persistence/TrainedModelProvider.java @@ -72,6 +72,7 @@ import org.elasticsearch.xpack.core.ml.MlStatsIndex; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; import org.elasticsearch.xpack.core.ml.inference.InferenceToXContentCompressor; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelType; import org.elasticsearch.xpack.core.ml.inference.persistence.InferenceIndexConstants; @@ -85,7 +86,6 @@ import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.core.ml.utils.ToXContentParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.nlp.Vocabulary; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java index 5908c550d318f..d2e5369ef4bd3 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcess.java @@ -20,6 +20,7 @@ import java.nio.file.Path; import java.util.Iterator; import java.util.List; +import java.util.concurrent.TimeoutException; import java.util.function.Consumer; public class NativePyTorchProcess extends AbstractNativeProcess implements PyTorchProcess { @@ -27,6 +28,7 @@ public class NativePyTorchProcess extends AbstractNativeProcess implements PyTor private static final String NAME = "pytorch_inference"; private final ProcessResultsParser resultsParser; + private final PyTorchProcessFactory.TimeoutRunnable afterInStreamClose; protected NativePyTorchProcess( String jobId, @@ -34,9 +36,11 @@ protected NativePyTorchProcess( ProcessPipes processPipes, int numberOfFields, List filesToDelete, + PyTorchProcessFactory.TimeoutRunnable afterInStreamClose, Consumer onProcessCrash ) { super(jobId, nativeController, processPipes, numberOfFields, filesToDelete, onProcessCrash); + this.afterInStreamClose = afterInStreamClose; this.resultsParser = new ProcessResultsParser<>(PyTorchResult.PARSER, NamedXContentRegistry.EMPTY); } @@ -71,4 +75,9 @@ public void writeInferenceRequest(BytesReference jsonRequest) throws IOException processInStream().write('\n'); processInStream().flush(); } + + @Override + protected void afterProcessInStreamClose() throws TimeoutException { + afterInStreamClose.run(); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java index 4585ca29e8d14..b26c6720ed179 100644 --- 
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/NativePyTorchProcessFactory.java @@ -56,6 +56,7 @@ void setProcessConnectTimeout(TimeValue processConnectTimeout) { public NativePyTorchProcess createProcess( TrainedModelDeploymentTask task, ExecutorService executorService, + TimeoutRunnable afterInStreamClose, Consumer onProcessCrash ) { ProcessPipes processPipes = new ProcessPipes( @@ -80,6 +81,7 @@ public NativePyTorchProcess createProcess( processPipes, 0, Collections.emptyList(), + afterInStreamClose, onProcessCrash ); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java index 07d9e8faa22ea..507c6115a392d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/inference/pytorch/process/PyTorchProcessFactory.java @@ -10,9 +10,19 @@ import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import java.util.concurrent.ExecutorService; +import java.util.concurrent.TimeoutException; import java.util.function.Consumer; public interface PyTorchProcessFactory { - PyTorchProcess createProcess(TrainedModelDeploymentTask task, ExecutorService executorService, Consumer onProcessCrash); + interface TimeoutRunnable { + void run() throws TimeoutException; + } + + PyTorchProcess createProcess( + TrainedModelDeploymentTask task, + ExecutorService executorService, + TimeoutRunnable afterInStreamClose, + Consumer onProcessCrash + ); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index 035f4864ebace..7532ae4317830 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -478,24 +478,27 @@ private void validate(Job job, JobUpdate jobUpdate, ActionListener handler private void validateModelSnapshotIdUpdate(Job job, String modelSnapshotId, VoidChainTaskExecutor voidChainTaskExecutor) { if (modelSnapshotId != null && ModelSnapshot.isTheEmptySnapshot(modelSnapshotId) == false) { - voidChainTaskExecutor.add(listener -> jobResultsProvider.getModelSnapshot(job.getId(), modelSnapshotId, newModelSnapshot -> { - if (newModelSnapshot == null) { - String message = Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, modelSnapshotId, job.getId()); - listener.onFailure(new ResourceNotFoundException(message)); - return; - } - jobResultsProvider.getModelSnapshot(job.getId(), job.getModelSnapshotId(), oldModelSnapshot -> { - if (oldModelSnapshot != null && newModelSnapshot.result.getTimestamp().before(oldModelSnapshot.result.getTimestamp())) { - String message = "Job [" - + job.getId() - + "] has a more recent model snapshot [" - + oldModelSnapshot.result.getSnapshotId() - + "]"; - listener.onFailure(new IllegalArgumentException(message)); + voidChainTaskExecutor.add( + listener -> jobResultsProvider.getModelSnapshot(job.getId(), modelSnapshotId, false, newModelSnapshot -> { + if (newModelSnapshot == null) { + String message = Messages.getMessage(Messages.REST_NO_SUCH_MODEL_SNAPSHOT, 
modelSnapshotId, job.getId()); + listener.onFailure(new ResourceNotFoundException(message)); + return; } - listener.onResponse(null); - }, listener::onFailure); - }, listener::onFailure)); + jobResultsProvider.getModelSnapshot(job.getId(), job.getModelSnapshotId(), false, oldModelSnapshot -> { + if (oldModelSnapshot != null + && newModelSnapshot.result.getTimestamp().before(oldModelSnapshot.result.getTimestamp())) { + String message = "Job [" + + job.getId() + + "] has a more recent model snapshot [" + + oldModelSnapshot.result.getSnapshotId() + + "]"; + listener.onFailure(new IllegalArgumentException(message)); + } + listener.onResponse(null); + }, listener::onFailure); + }, listener::onFailure) + ); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java index 548c95d1ddd50..f2bf180943b82 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/NodeLoadDetector.java @@ -16,10 +16,10 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.utils.MemoryTrackedTaskState; import org.elasticsearch.xpack.core.ml.utils.MlTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.elasticsearch.xpack.ml.utils.NativeMemoryCalculator; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java index 7b41f3e055874..b661f6294d89a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/persistence/JobResultsProvider.java @@ -1257,11 +1257,13 @@ public BatchedResultsIterator newBatchedInfluencersIterator(String j } /** - * Get a job's model snapshot by its id + * Get a job's model snapshot by its id. + * Quantiles should only be included when strictly required, because they can be very large and consume a lot of heap. 
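The exclusion is plain source filtering on the snapshot search: when quantiles are not needed, the field is dropped server-side instead of being fetched and discarded. A sketch with the stock search API (index name, doc id, and the bare `quantiles` field name are illustrative):

```java
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

final class SnapshotSearch {
    // Build a search for one snapshot document that never transfers its quantiles.
    static SearchRequest withoutQuantiles(String index, String docId) {
        SearchSourceBuilder source = new SearchSourceBuilder().query(QueryBuilders.idsQuery().addIds(docId))
            // includes = null (keep everything else), excludes = the quantiles field
            .fetchSource(null, new String[] { "quantiles" });
        return new SearchRequest(index).source(source);
    }
}
```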
*/ public void getModelSnapshot( String jobId, @Nullable String modelSnapshotId, + boolean includeQuantiles, Consumer<Result<ModelSnapshot>> handler, Consumer<Exception> errorHandler ) { @@ -1271,6 +1273,9 @@ public void getModelSnapshot( } String resultsIndex = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); SearchRequestBuilder search = createDocIdSearch(resultsIndex, ModelSnapshot.documentId(jobId, modelSnapshotId)); + if (includeQuantiles == false) { + search.setFetchSource(null, ModelSnapshot.QUANTILES.getPreferredName()); + } searchSingleResult( jobId, ModelSnapshot.TYPE.getPreferredName(), diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java index 8deac327c065e..658db2997485d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManager.java @@ -1062,4 +1062,24 @@ public void clusterChanged(ClusterChangedEvent event) { resetInProgress = MlMetadata.getMlMetadata(event.state()).isResetMode(); } + /** + * Finds the memory used by open autodetect processes on the current node. + * @return Memory used by open autodetect processes on the current node. + */ + public ByteSizeValue getOpenProcessMemoryUsage() { + long memoryUsedBytes = 0; + for (ProcessContext processContext : processByAllocation.values()) { + if (processContext.getState() == ProcessContext.ProcessStateName.RUNNING) { + ModelSizeStats modelSizeStats = processContext.getAutodetectCommunicator().getModelSizeStats(); + ModelSizeStats.AssignmentMemoryBasis basis = modelSizeStats.getAssignmentMemoryBasis(); + memoryUsedBytes += switch (basis != null ?
basis : ModelSizeStats.AssignmentMemoryBasis.MODEL_MEMORY_LIMIT) { + case MODEL_MEMORY_LIMIT -> modelSizeStats.getModelBytesMemoryLimit(); + case CURRENT_MODEL_BYTES -> modelSizeStats.getModelBytes(); + case PEAK_MODEL_BYTES -> Optional.ofNullable(modelSizeStats.getPeakModelBytes()).orElse(modelSizeStats.getModelBytes()); + }; + memoryUsedBytes += Job.PROCESS_MEMORY_OVERHEAD.getBytes(); + } + } + return ByteSizeValue.ofBytes(memoryUsedBytes); + } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java index 69b926876302a..cc3f8f0dd1e67 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/snapshot/upgrader/SnapshotUpgradeTaskExecutor.java @@ -329,6 +329,6 @@ private void deleteSnapshotAndFailTask(AllocatedPersistentTask task, String jobI ); }); - jobResultsProvider.getModelSnapshot(jobId, snapshotId, modelSnapshotListener::onResponse, modelSnapshotListener::onFailure); + jobResultsProvider.getModelSnapshot(jobId, snapshotId, false, modelSnapshotListener::onResponse, modelSnapshotListener::onFailure); } } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java index 8ea85208a2de8..dd71800bd4f90 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/AbstractNativeProcess.java @@ -223,7 +223,7 @@ public void close() throws IOException { * Implementations can override this if they need to perform extra processing * immediately after the native process's input stream is closed. 
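Widening the hook to `throws TimeoutException` lets a subclass block on cleanup work without wrapping checked exceptions. A sketch of an override in that style, with hypothetical class names standing in for the real process hierarchy (the 30-minute figure is an assumption, not taken from the change):

```java
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;

// Hypothetical stand-in for AbstractNativeProcess with the widened hook.
abstract class NativeProcessSketch {
    protected void afterProcessInStreamClose() throws TimeoutException {
        // no-op by default, mirroring the base class
    }
}

final class PyTorchLikeProcess extends NativeProcessSketch {
    private final CountDownLatch resultsDrained = new CountDownLatch(1);

    void onAllResultsProcessed() {
        resultsDrained.countDown();
    }

    @Override
    protected void afterProcessInStreamClose() throws TimeoutException {
        try {
            // Wait for queued inference results to be flushed before teardown.
            if (resultsDrained.await(30, TimeUnit.MINUTES) == false) {
                throw new TimeoutException("results not drained within 30 minutes");
            }
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new TimeoutException("interrupted while draining results");
        }
    }
}
```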
*/ - protected void afterProcessInStreamClose() { + protected void afterProcessInStreamClose() throws TimeoutException { // no-op by default } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java index 5ba577eb90ab7..3f502c4d95cc9 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/process/MlMemoryTracker.java @@ -26,12 +26,12 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskParams; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.JobManager; import org.elasticsearch.xpack.ml.job.persistence.JobResultsProvider; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java index 12019e93ba713..24383e51b0ed2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilder.java @@ -15,8 +15,8 @@ import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; import org.elasticsearch.index.query.AbstractQueryBuilder; -import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.QueryRewriteContext; @@ -41,6 +41,7 @@ public class TextExpansionQueryBuilder extends AbstractQueryBuilder { public static final String NAME = "text_expansion"; + public static final ParseField PRUNING_CONFIG = new ParseField("pruning_config"); public static final ParseField MODEL_TEXT = new ParseField("model_text"); public static final ParseField MODEL_ID = new ParseField("model_id"); @@ -48,8 +49,13 @@ public class TextExpansionQueryBuilder extends AbstractQueryBuilder weightedTokensSupplier; + private final TokenPruningConfig tokenPruningConfig; public TextExpansionQueryBuilder(String fieldName, String modelText, String modelId) { + this(fieldName, modelText, modelId, null); + } + + public TextExpansionQueryBuilder(String fieldName, String modelText, String modelId, @Nullable TokenPruningConfig tokenPruningConfig) { if (fieldName == null) { throw new IllegalArgumentException("[" + NAME + "] requires a fieldName"); } @@ -59,10 +65,10 @@ public TextExpansionQueryBuilder(String fieldName, String modelText, String mode if (modelId == null) { throw new IllegalArgumentException("[" + NAME + "] requires a " + MODEL_ID.getPreferredName() + 
" value"); } - this.fieldName = fieldName; this.modelText = modelText; this.modelId = modelId; + this.tokenPruningConfig = tokenPruningConfig; } public TextExpansionQueryBuilder(StreamInput in) throws IOException { @@ -70,12 +76,18 @@ public TextExpansionQueryBuilder(StreamInput in) throws IOException { this.fieldName = in.readString(); this.modelText = in.readString(); this.modelId = in.readString(); + if (in.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + this.tokenPruningConfig = in.readOptionalWriteable(TokenPruningConfig::new); + } else { + this.tokenPruningConfig = null; + } } private TextExpansionQueryBuilder(TextExpansionQueryBuilder other, SetOnce weightedTokensSupplier) { this.fieldName = other.fieldName; this.modelText = other.modelText; this.modelId = other.modelId; + this.tokenPruningConfig = other.tokenPruningConfig; this.boost = other.boost; this.queryName = other.queryName; this.weightedTokensSupplier = weightedTokensSupplier; @@ -85,6 +97,10 @@ String getFieldName() { return fieldName; } + public TokenPruningConfig getTokenPruningConfig() { + return tokenPruningConfig; + } + @Override public String getWriteableName() { return NAME; @@ -103,6 +119,9 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(fieldName); out.writeString(modelText); out.writeString(modelId); + if (out.getTransportVersion().onOrAfter(TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED)) { + out.writeOptionalWriteable(tokenPruningConfig); + } } @Override @@ -111,6 +130,9 @@ protected void doXContent(XContentBuilder builder, Params params) throws IOExcep builder.startObject(fieldName); builder.field(MODEL_TEXT.getPreferredName(), modelText); builder.field(MODEL_ID.getPreferredName(), modelId); + if (tokenPruningConfig != null) { + builder.field(PRUNING_CONFIG.getPreferredName(), tokenPruningConfig); + } boostAndQueryNameToXContent(builder); builder.endObject(); builder.endObject(); @@ -174,21 +196,33 @@ protected QueryBuilder doRewrite(QueryRewriteContext queryRewriteContext) throws return new TextExpansionQueryBuilder(this, textExpansionResultsSupplier); } - static BoolQueryBuilder weightedTokensToQuery( + private QueryBuilder weightedTokensToQuery( String fieldName, TextExpansionResults textExpansionResults, QueryRewriteContext queryRewriteContext - ) throws IOException { + ) { + if (tokenPruningConfig != null) { + WeightedTokensQueryBuilder weightedTokensQueryBuilder = new WeightedTokensQueryBuilder( + fieldName, + textExpansionResults.getWeightedTokens(), + tokenPruningConfig + ); + weightedTokensQueryBuilder.queryName(queryName); + weightedTokensQueryBuilder.boost(boost); + return weightedTokensQueryBuilder; + } var boolQuery = QueryBuilders.boolQuery(); for (var weightedToken : textExpansionResults.getWeightedTokens()) { boolQuery.should(QueryBuilders.termQuery(fieldName, weightedToken.token()).boost(weightedToken.weight())); } boolQuery.minimumShouldMatch(1); + boolQuery.boost(this.boost); + boolQuery.queryName(this.queryName); return boolQuery; } @Override - protected Query doToQuery(SearchExecutionContext context) throws IOException { + protected Query doToQuery(SearchExecutionContext context) { throw new IllegalStateException("text_expansion should have been rewritten to another query type"); } @@ -197,18 +231,20 @@ protected boolean doEquals(TextExpansionQueryBuilder other) { return Objects.equals(fieldName, other.fieldName) && Objects.equals(modelText, other.modelText) && Objects.equals(modelId, 
other.modelId) + && Objects.equals(tokenPruningConfig, other.tokenPruningConfig) && Objects.equals(weightedTokensSupplier, other.weightedTokensSupplier); } @Override protected int doHashCode() { - return Objects.hash(fieldName, modelText, modelId, weightedTokensSupplier); + return Objects.hash(fieldName, modelText, modelId, tokenPruningConfig, weightedTokensSupplier); } public static TextExpansionQueryBuilder fromXContent(XContentParser parser) throws IOException { String fieldName = null; String modelText = null; String modelId = null; + TokenPruningConfig tokenPruningConfig = null; float boost = AbstractQueryBuilder.DEFAULT_BOOST; String queryName = null; String currentFieldName = null; @@ -222,6 +258,15 @@ public static TextExpansionQueryBuilder fromXContent(XContentParser parser) thro while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { if (token == XContentParser.Token.FIELD_NAME) { currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + if (PRUNING_CONFIG.match(currentFieldName, parser.getDeprecationHandler())) { + tokenPruningConfig = TokenPruningConfig.fromXContent(parser); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + NAME + "] unknown token [" + token + "] after [" + currentFieldName + "]" + ); + } } else if (token.isValue()) { if (MODEL_TEXT.match(currentFieldName, parser.getDeprecationHandler())) { modelText = parser.text(); @@ -259,7 +304,7 @@ public static TextExpansionQueryBuilder fromXContent(XContentParser parser) thro throw new ParsingException(parser.getTokenLocation(), "No fieldname specified for query"); } - TextExpansionQueryBuilder queryBuilder = new TextExpansionQueryBuilder(fieldName, modelText, modelId); + TextExpansionQueryBuilder queryBuilder = new TextExpansionQueryBuilder(fieldName, modelText, modelId, tokenPruningConfig); queryBuilder.queryName(queryName); queryBuilder.boost(boost); return queryBuilder; diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfig.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfig.java new file mode 100644 index 0000000000000..d789a645fd9c4 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfig.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
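Wired together, the new constructor argument lets callers opt into pruning when building the query programmatically. A usage sketch (field name, query text, and model id are illustrative; the `TokenPruningConfig` values restate the defaults defined below):

```java
final class QueryExample {
    static TextExpansionQueryBuilder prunedExpansionQuery() {
        TokenPruningConfig pruning = new TokenPruningConfig(5f, 0.4f, false); // the defaults
        TextExpansionQueryBuilder query = new TextExpansionQueryBuilder(
            "ml.tokens",           // rank_features field (illustrative)
            "how do sharks sleep", // model_text (illustrative)
            ".elser_model_2",      // model_id (illustrative)
            pruning
        );
        query.queryName("pruned-expansion");
        return query;
    }
}
```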
+ */ + +package org.elasticsearch.xpack.ml.queries; + +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.ToXContentObject; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Locale; +import java.util.Objects; +import java.util.Set; + +import static org.elasticsearch.xpack.ml.queries.TextExpansionQueryBuilder.PRUNING_CONFIG; + +public class TokenPruningConfig implements Writeable, ToXContentObject { + public static final ParseField TOKENS_FREQ_RATIO_THRESHOLD = new ParseField("tokens_freq_ratio_threshold"); + public static final ParseField TOKENS_WEIGHT_THRESHOLD = new ParseField("tokens_weight_threshold"); + public static final ParseField ONLY_SCORE_PRUNED_TOKENS_FIELD = new ParseField("only_score_pruned_tokens"); + + // Tokens whose frequency is more than 5 times the average frequency of all tokens in the specified field are considered outliers. + public static final float DEFAULT_TOKENS_FREQ_RATIO_THRESHOLD = 5; + public static final float MAX_TOKENS_FREQ_RATIO_THRESHOLD = 100; + // A token's weight should be > 40% of the best weight in the query to be considered significant. + public static final float DEFAULT_TOKENS_WEIGHT_THRESHOLD = 0.4f; + + private final float tokensFreqRatioThreshold; + private final float tokensWeightThreshold; + private final boolean onlyScorePrunedTokens; + + public TokenPruningConfig() { + this(DEFAULT_TOKENS_FREQ_RATIO_THRESHOLD, DEFAULT_TOKENS_WEIGHT_THRESHOLD, false); + } + + public TokenPruningConfig(float tokensFreqRatioThreshold, float tokensWeightThreshold, boolean onlyScorePrunedTokens) { + if (tokensFreqRatioThreshold < 1 || tokensFreqRatioThreshold > MAX_TOKENS_FREQ_RATIO_THRESHOLD) { + throw new IllegalArgumentException( + "[" + + TOKENS_FREQ_RATIO_THRESHOLD.getPreferredName() + + "] must be between [1.0] and [" + + String.format(Locale.ROOT, "%.1f", MAX_TOKENS_FREQ_RATIO_THRESHOLD) + + "], got " + + tokensFreqRatioThreshold + ); + } + if (tokensWeightThreshold < 0 || tokensWeightThreshold > 1) { + throw new IllegalArgumentException("[" + TOKENS_WEIGHT_THRESHOLD.getPreferredName() + "] must be between 0 and 1"); + } + this.tokensFreqRatioThreshold = tokensFreqRatioThreshold; + this.tokensWeightThreshold = tokensWeightThreshold; + this.onlyScorePrunedTokens = onlyScorePrunedTokens; + } + + public TokenPruningConfig(StreamInput in) throws IOException { + this.tokensFreqRatioThreshold = in.readFloat(); + this.tokensWeightThreshold = in.readFloat(); + this.onlyScorePrunedTokens = in.readBoolean(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeFloat(tokensFreqRatioThreshold); + out.writeFloat(tokensWeightThreshold); + out.writeBoolean(onlyScorePrunedTokens); + } + + /** + * Returns the frequency ratio threshold to apply on the query. + * Tokens whose frequency is more than ratio_threshold times the average frequency of all tokens in the specified + * field are considered outliers and may be subject to removal from the query. + */ + public float getTokensFreqRatioThreshold() { + return tokensFreqRatioThreshold; + } + + /** + * Returns the weight threshold to apply on the query. 
+ * Tokens whose weight is more than (weightThreshold * best_weight) of the highest weight in the query are not + * considered outliers, even if their frequency exceeds the specified ratio_threshold. + * This threshold ensures that important tokens, as indicated by their weight, are retained in the query. + */ + public float getTokensWeightThreshold() { + return tokensWeightThreshold; + } + + /** + * Returns whether the filtering process retains tokens identified as non-relevant based on the specified thresholds + * (ratio and weight). When {@code true}, only non-relevant tokens are considered for matching and scoring documents. + * Enabling this option is valuable for re-scoring top hits retrieved from a {@link WeightedTokensQueryBuilder} with + * active thresholds. + */ + public boolean isOnlyScorePrunedTokens() { + return onlyScorePrunedTokens; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + TokenPruningConfig that = (TokenPruningConfig) o; + return Float.compare(that.tokensFreqRatioThreshold, tokensFreqRatioThreshold) == 0 + && Float.compare(that.tokensWeightThreshold, tokensWeightThreshold) == 0 + && onlyScorePrunedTokens == that.onlyScorePrunedTokens; + } + + @Override + public int hashCode() { + return Objects.hash(tokensFreqRatioThreshold, tokensWeightThreshold, onlyScorePrunedTokens); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.field(TOKENS_FREQ_RATIO_THRESHOLD.getPreferredName(), tokensFreqRatioThreshold); + builder.field(TOKENS_WEIGHT_THRESHOLD.getPreferredName(), tokensWeightThreshold); + if (onlyScorePrunedTokens) { + builder.field(ONLY_SCORE_PRUNED_TOKENS_FIELD.getPreferredName(), onlyScorePrunedTokens); + } + builder.endObject(); + return builder; + } + + public static TokenPruningConfig fromXContent(XContentParser parser) throws IOException { + String currentFieldName = null; + XContentParser.Token token; + float ratioThreshold = DEFAULT_TOKENS_FREQ_RATIO_THRESHOLD; + float weightThreshold = DEFAULT_TOKENS_WEIGHT_THRESHOLD; + boolean onlyScorePrunedTokens = false; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.START_OBJECT) { + continue; + } + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + if (Set.of( + TOKENS_FREQ_RATIO_THRESHOLD.getPreferredName(), + TOKENS_WEIGHT_THRESHOLD.getPreferredName(), + ONLY_SCORE_PRUNED_TOKENS_FIELD.getPreferredName() + ).contains(currentFieldName) == false) { + throw new ParsingException( + parser.getTokenLocation(), + "[" + PRUNING_CONFIG.getPreferredName() + "] unknown token [" + currentFieldName + "]" + ); + } + } else if (token.isValue()) { + if (TOKENS_FREQ_RATIO_THRESHOLD.match(currentFieldName, parser.getDeprecationHandler())) { + ratioThreshold = parser.intValue(); + } else if (TOKENS_WEIGHT_THRESHOLD.match(currentFieldName, parser.getDeprecationHandler())) { + weightThreshold = parser.floatValue(); + } else if (ONLY_SCORE_PRUNED_TOKENS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + onlyScorePrunedTokens = parser.booleanValue(); + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + PRUNING_CONFIG.getPreferredName() + "] does not support [" + currentFieldName + "]" + ); + } + } else { + throw new ParsingException( + parser.getTokenLocation(), + "[" + PRUNING_CONFIG.getPreferredName() 
+ "] unknown token [" + token + "] after [" + currentFieldName + "]" + ); + } + } + return new TokenPruningConfig(ratioThreshold, weightThreshold, onlyScorePrunedTokens); + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java new file mode 100644 index 0000000000000..a09bcadaacfc0 --- /dev/null +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilder.java @@ -0,0 +1,264 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.queries; + +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.Term; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.TransportVersions; +import org.elasticsearch.common.ParsingException; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.core.Nullable; +import org.elasticsearch.index.mapper.MappedFieldType; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.xcontent.ParseField; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults.WeightedToken; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; + +import static org.elasticsearch.xpack.ml.queries.TextExpansionQueryBuilder.PRUNING_CONFIG; + +public class WeightedTokensQueryBuilder extends AbstractQueryBuilder { + public static final String NAME = "weighted_tokens"; + + public static final ParseField TOKENS_FIELD = new ParseField("tokens"); + private final String fieldName; + private final List tokens; + @Nullable + private final TokenPruningConfig tokenPruningConfig; + + public WeightedTokensQueryBuilder(String fieldName, List tokens) { + this(fieldName, tokens, null); + } + + public WeightedTokensQueryBuilder(String fieldName, List tokens, @Nullable TokenPruningConfig tokenPruningConfig) { + this.fieldName = Objects.requireNonNull(fieldName, "[" + NAME + "] requires a fieldName"); + this.tokens = Objects.requireNonNull(tokens, "[" + NAME + "] requires tokens"); + if (tokens.isEmpty()) { + throw new IllegalArgumentException("[" + NAME + "] requires at least one token"); + } + this.tokenPruningConfig = tokenPruningConfig; + } + + public WeightedTokensQueryBuilder(StreamInput in) throws IOException { + super(in); + this.fieldName = in.readString(); + this.tokens = in.readCollectionAsList(WeightedToken::new); + this.tokenPruningConfig = in.readOptionalWriteable(TokenPruningConfig::new); + } + + public String getFieldName() { + return fieldName; + } + + @Nullable + public TokenPruningConfig getTokenPruningConfig() { + return tokenPruningConfig; + } + + @Override + protected void 
doWriteTo(StreamOutput out) throws IOException { + out.writeString(fieldName); + out.writeCollection(tokens); + out.writeOptionalWriteable(tokenPruningConfig); + } + + @Override + protected void doXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(NAME); + builder.startObject(fieldName); + builder.startObject(TOKENS_FIELD.getPreferredName()); + for (var token : tokens) { + token.toXContent(builder, params); + } + builder.endObject(); + if (tokenPruningConfig != null) { + builder.field(PRUNING_CONFIG.getPreferredName(), tokenPruningConfig); + } + boostAndQueryNameToXContent(builder); + builder.endObject(); + builder.endObject(); + } + + /** + * We calculate the maximum number of unique tokens for any shard of data. The maximum is used to compute + * average token frequency since we don't have a unique inter-segment token count. + * Once we have the maximum number of unique tokens, we use the total count of tokens in the index to calculate + * the average frequency ratio. + * + * @param reader + * @param fieldDocCount + * @return float + * @throws IOException + */ + private float getAverageTokenFreqRatio(IndexReader reader, int fieldDocCount) throws IOException { + int numUniqueTokens = 0; + for (var leaf : reader.getContext().leaves()) { + var terms = leaf.reader().terms(fieldName); + if (terms != null) { + numUniqueTokens = (int) Math.max(terms.size(), numUniqueTokens); + } + } + if (numUniqueTokens == 0) { + return 0; + } + return (float) reader.getSumDocFreq(fieldName) / fieldDocCount / numUniqueTokens; + } + + /** + * Returns true if the token should be queried based on the {@code tokensFreqRatioThreshold} and {@code tokensWeightThreshold} + * set on the query. + */ + private boolean shouldKeepToken( + IndexReader reader, + WeightedToken token, + int fieldDocCount, + float averageTokenFreqRatio, + float bestWeight + ) throws IOException { + if (this.tokenPruningConfig == null) { + return true; + } + int docFreq = reader.docFreq(new Term(fieldName, token.token())); + if (docFreq == 0) { + return false; + } + float tokenFreqRatio = (float) docFreq / fieldDocCount; + return tokenFreqRatio < this.tokenPruningConfig.getTokensFreqRatioThreshold() * averageTokenFreqRatio + || token.weight() > this.tokenPruningConfig.getTokensWeightThreshold() * bestWeight; + } + + @Override + protected Query doToQuery(SearchExecutionContext context) throws IOException { + final MappedFieldType ft = context.getFieldType(fieldName); + if (ft == null) { + return new MatchNoDocsQuery("The \"" + getName() + "\" query is against a field that does not exist"); + } + var qb = new BooleanQuery.Builder(); + int fieldDocCount = context.getIndexReader().getDocCount(fieldName); + float bestWeight = 0f; + for (var t : tokens) { + bestWeight = Math.max(t.weight(), bestWeight); + } + float averageTokenFreqRatio = getAverageTokenFreqRatio(context.getIndexReader(), fieldDocCount); + if (averageTokenFreqRatio == 0) { + return new MatchNoDocsQuery("The \"" + getName() + "\" query is against an empty field"); + } + for (var token : tokens) { + boolean keep = shouldKeepToken(context.getIndexReader(), token, fieldDocCount, averageTokenFreqRatio, bestWeight); + if (this.tokenPruningConfig != null) { + keep ^= this.tokenPruningConfig.isOnlyScorePrunedTokens(); + } + if (keep) { + qb.add(new BoostQuery(ft.termQuery(token.token(), context), token.weight()), BooleanClause.Occur.SHOULD); + } + } + qb.setMinimumNumberShouldMatch(1); + return qb.build(); + } + + @Override + protected boolean 
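The keep-or-prune decision in `shouldKeepToken` reduces to two comparisons against index-wide statistics: a token survives if it is not unusually frequent, or if its weight is high enough to matter regardless. A worked example of the arithmetic (all statistics invented for illustration):

```java
// Worked example of the pruning test, with invented statistics.
final class PruneMath {
    public static void main(String[] args) {
        long sumDocFreq = 1_000_000;   // total postings across the field
        int fieldDocCount = 50_000;    // docs containing the field
        int numUniqueTokens = 20_000;  // max unique tokens in any segment

        // Average frequency ratio, as in getAverageTokenFreqRatio:
        float avg = (float) sumDocFreq / fieldDocCount / numUniqueTokens; // = 0.001

        int tokenDocFreq = 400;        // docs containing this token
        float tokenFreqRatio = (float) tokenDocFreq / fieldDocCount;      // = 0.008
        float weight = 0.35f, bestWeight = 1.2f;

        // Keep if not too frequent (freq ratio below 5x the average)...
        boolean notOutlier = tokenFreqRatio < 5f * avg;                   // 0.008 < 0.005 -> false
        // ...or significant anyway (weight above 40% of the best weight).
        boolean significant = weight > 0.4f * bestWeight;                 // 0.35 > 0.48 -> false
        System.out.println("keep token: " + (notOutlier || significant)); // pruned
    }
}
```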
doEquals(WeightedTokensQueryBuilder other) { + return Objects.equals(fieldName, other.fieldName) + && Objects.equals(tokenPruningConfig, other.tokenPruningConfig) + && tokens.equals(other.tokens); + } + + @Override + protected int doHashCode() { + return Objects.hash(fieldName, tokens, tokenPruningConfig); + } + + @Override + public String getWriteableName() { + return NAME; + } + + @Override + public TransportVersion getMinimalSupportedVersion() { + return TransportVersions.TEXT_EXPANSION_TOKEN_PRUNING_CONFIG_ADDED; + } + + private static float parseWeight(String token, Object weight) throws IOException { + if (weight instanceof Number asNumber) { + return asNumber.floatValue(); + } + if (weight instanceof String asString) { + return Float.parseFloat(asString); + } + throw new ElasticsearchParseException( + "Illegal weight for token: [" + token + "], expected floating point got " + weight.getClass().getSimpleName() + ); + } + + public static WeightedTokensQueryBuilder fromXContent(XContentParser parser) throws IOException { + String currentFieldName = null; + String fieldName = null; + List tokens = new ArrayList<>(); + TokenPruningConfig tokenPruningConfig = null; + float boost = AbstractQueryBuilder.DEFAULT_BOOST; + String queryName = null; + XContentParser.Token token; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (token == XContentParser.Token.START_OBJECT) { + throwParsingExceptionOnMultipleFields(NAME, parser.getTokenLocation(), fieldName, currentFieldName); + fieldName = currentFieldName; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + if (token == XContentParser.Token.FIELD_NAME) { + currentFieldName = parser.currentName(); + } else if (PRUNING_CONFIG.match(currentFieldName, parser.getDeprecationHandler())) { + if (token != XContentParser.Token.START_OBJECT) { + throw new ParsingException( + parser.getTokenLocation(), + "[" + PRUNING_CONFIG.getPreferredName() + "] should be an object" + ); + } + tokenPruningConfig = TokenPruningConfig.fromXContent(parser); + } else if (TOKENS_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + var tokensMap = parser.map(); + for (var e : tokensMap.entrySet()) { + tokens.add(new WeightedToken(e.getKey(), parseWeight(e.getKey(), e.getValue()))); + } + } else if (AbstractQueryBuilder.BOOST_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + boost = parser.floatValue(); + } else if (AbstractQueryBuilder.NAME_FIELD.match(currentFieldName, parser.getDeprecationHandler())) { + queryName = parser.text(); + } else { + throw new ParsingException(parser.getTokenLocation(), "unknown field [" + currentFieldName + "]"); + } + } + } else { + throw new IllegalArgumentException("invalid query"); + } + } + + if (fieldName == null) { + throw new ParsingException(parser.getTokenLocation(), "No fieldname specified for query"); + } + + var qb = new WeightedTokensQueryBuilder(fieldName, tokens, tokenPruningConfig); + qb.queryName(queryName); + qb.boost(boost); + return qb; + } +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java index 1fb45c07c5818..37731fcbfb10b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java +++ 
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java index 1fb45c07c5818..37731fcbfb10b 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/RestDeleteExpiredDataAction.java @@ -49,7 +49,9 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient DeleteExpiredDataAction.Request request; if (restRequest.hasContent()) { - request = DeleteExpiredDataAction.Request.parseRequest(jobId, restRequest.contentParser()); + try (var parser = restRequest.contentParser()) { + request = DeleteExpiredDataAction.Request.parseRequest(jobId, parser); + } } else { request = new DeleteExpiredDataAction.Request(); request.setJobId(jobId); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java index a5f98763d3245..64981805717a1 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestPutDatafeedAction.java @@ -47,8 +47,10 @@ public String getName() { protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException { String datafeedId = restRequest.param(DatafeedConfig.ID.getPreferredName()); IndicesOptions indicesOptions = IndicesOptions.fromRequest(restRequest, SearchRequest.DEFAULT_INDICES_OPTIONS); - XContentParser parser = restRequest.contentParser(); - PutDatafeedAction.Request putDatafeedRequest = PutDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); + PutDatafeedAction.Request putDatafeedRequest; + try (XContentParser parser = restRequest.contentParser()) { + putDatafeedRequest = PutDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); + } putDatafeedRequest.timeout(restRequest.paramAsTime("timeout", putDatafeedRequest.timeout())); putDatafeedRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", putDatafeedRequest.masterNodeTimeout())); return channel -> client.execute(PutDatafeedAction.INSTANCE, putDatafeedRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java index f0260a9301edc..97e1514441441 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/datafeeds/RestUpdateDatafeedAction.java @@ -53,8 +53,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient || restRequest.hasParam("ignore_throttled")) { indicesOptions = IndicesOptions.fromRequest(restRequest, SearchRequest.DEFAULT_INDICES_OPTIONS); } - XContentParser parser = restRequest.contentParser(); - UpdateDatafeedAction.Request updateDatafeedRequest = UpdateDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); + UpdateDatafeedAction.Request updateDatafeedRequest; + try (XContentParser parser = restRequest.contentParser()) { + updateDatafeedRequest = UpdateDatafeedAction.Request.parseRequest(datafeedId, indicesOptions, parser); + } updateDatafeedRequest.timeout(restRequest.paramAsTime("timeout", updateDatafeedRequest.timeout())); updateDatafeedRequest.masterNodeTimeout(restRequest.paramAsTime("master_timeout", updateDatafeedRequest.masterNodeTimeout())); diff --git
a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java index 48a820360e61b..52a3d83eeb11a 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPostDataFrameAnalyticsUpdateAction.java @@ -48,8 +48,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } String id = restRequest.param(DataFrameAnalyticsConfig.ID.getPreferredName()); - XContentParser parser = restRequest.contentParser(); - UpdateDataFrameAnalyticsAction.Request updateRequest = UpdateDataFrameAnalyticsAction.Request.parseRequest(id, parser); + UpdateDataFrameAnalyticsAction.Request updateRequest; + try (XContentParser parser = restRequest.contentParser()) { + updateRequest = UpdateDataFrameAnalyticsAction.Request.parseRequest(id, parser); + } updateRequest.timeout(restRequest.paramAsTime("timeout", updateRequest.timeout())); return channel -> client.execute(UpdateDataFrameAnalyticsAction.INSTANCE, updateRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java index 9a3d958bd3a09..896b1dfdb6df2 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/dataframe/RestPutDataFrameAnalyticsAction.java @@ -57,8 +57,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient } String id = restRequest.param(DataFrameAnalyticsConfig.ID.getPreferredName()); - XContentParser parser = restRequest.contentParser(); - PutDataFrameAnalyticsAction.Request putRequest = PutDataFrameAnalyticsAction.Request.parseRequest(id, parser); + PutDataFrameAnalyticsAction.Request putRequest; + try (XContentParser parser = restRequest.contentParser()) { + putRequest = PutDataFrameAnalyticsAction.Request.parseRequest(id, parser); + } putRequest.timeout(restRequest.paramAsTime("timeout", putRequest.timeout())); return channel -> client.execute(PutDataFrameAnalyticsAction.INSTANCE, putRequest, new RestToXContentListener<>(channel)); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelAction.java index 4afd07479a3eb..78b02871c3c57 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/rest/inference/RestInferTrainedModelAction.java @@ -47,7 +47,10 @@ protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient if (restRequest.hasContent() == false) { throw ExceptionsHelper.badRequestException("requires body"); } - InferModelAction.Request.Builder request = InferModelAction.Request.parseRequest(modelId, restRequest.contentParser()); + InferModelAction.Request.Builder request; + try (var parser = restRequest.contentParser()) { + request = InferModelAction.Request.parseRequest(modelId, parser); + } if (restRequest.hasParam(InferModelAction.Request.TIMEOUT.getPreferredName())) { TimeValue inferTimeout = restRequest.paramAsTime(
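[Editor's note: the six REST handler hunks above all apply the same leak fix: the XContentParser returned by RestRequest.contentParser() is Closeable and is now released through try-with-resources instead of being abandoned after parsing. A minimal sketch of the pattern, with a hypothetical SomeAction standing in for the concrete request types:

    SomeAction.Request request;
    try (XContentParser parser = restRequest.contentParser()) {
        // consume the request body while the parser is open
        request = SomeAction.Request.parseRequest(id, parser);
    } // parser is closed here, even if parsing throws

The request object deliberately outlives the try block, so only the parser's lifetime changes.]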
diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/MlParserUtils.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/MlParserUtils.java index 8661497593815..36607e894edef 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/MlParserUtils.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/utils/persistence/MlParserUtils.java @@ -10,14 +10,14 @@ import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.search.SearchHit; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentParser; +import org.elasticsearch.xcontent.XContentParserConfiguration; import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.io.InputStream; import java.util.function.BiFunction; public final class MlParserUtils { @@ -33,9 +33,12 @@ private MlParserUtils() {} public static <T> T parse(SearchHit hit, BiFunction<XContentParser, Void, T> objectParser) { BytesReference source = hit.getSourceRef(); try ( - InputStream stream = source.streamInput(); - XContentParser parser = XContentFactory.xContent(XContentType.JSON) - .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, stream) + XContentParser parser = XContentHelper.createParserNotCompressed( + XContentParserConfiguration.EMPTY.withRegistry(NamedXContentRegistry.EMPTY) + .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE), + source, + XContentType.JSON + ) ) { return objectParser.apply(parser, null); } catch (IOException e) { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningAdOnly.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningAdOnly.java index 3ff3a4a404f97..175a035a70f7e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningAdOnly.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningAdOnly.java @@ -14,6 +14,6 @@ public class LocalStateMachineLearningAdOnly extends LocalStateMachineLearning { public LocalStateMachineLearningAdOnly(final Settings settings, final Path configPath) { - super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true, true, false, false))); + super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true, true, false, false, false))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningDfaOnly.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningDfaOnly.java index 1a72f27865d8a..f054e52dc29ec 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningDfaOnly.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningDfaOnly.java @@ -14,6 +14,6 @@ public class LocalStateMachineLearningDfaOnly extends LocalStateMachineLearning { public LocalStateMachineLearningDfaOnly(final Settings settings, final Path configPath) { - super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true,
false, true, false))); + super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true, false, true, false, false))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningNlpOnly.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningNlpOnly.java index 0f11e8033b83d..a3d684011e932 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningNlpOnly.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/LocalStateMachineLearningNlpOnly.java @@ -14,6 +14,6 @@ public class LocalStateMachineLearningNlpOnly extends LocalStateMachineLearning { public LocalStateMachineLearningNlpOnly(final Settings settings, final Path configPath) { - super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true, false, false, true))); + super(settings, configPath, new MlTestExtensionLoader(new MlTestExtension(true, true, false, false, true, false))); } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java index 08568bd3e0d1e..084a9d95939c5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningInfoTransportActionTests.java @@ -160,7 +160,14 @@ private MachineLearningUsageTransportAction newUsageAction( licenseState, jobManagerHolder, new MachineLearningExtensionHolder( - new MachineLearningTests.MlTestExtension(true, true, isAnomalyDetectionEnabled, isDataFrameAnalyticsEnabled, isNlpEnabled) + new MachineLearningTests.MlTestExtension( + true, + true, + isAnomalyDetectionEnabled, + isDataFrameAnalyticsEnabled, + isNlpEnabled, + true + ) ) ); } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java index 84cef907cd093..f5f81a5ca15f3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MachineLearningTests.java @@ -221,9 +221,8 @@ public void testNoAttributes_givenClash() throws IOException { public void testAnomalyDetectionOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); - try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings)) { - MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, true, false, false)); - machineLearning.loadExtensions(loader); + MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, true, false, false, false)); + try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, hasItem(instanceOf(RestGetJobsAction.class))); @@ -242,9 +241,8 @@ public void testDataFrameAnalyticsOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); - try (MachineLearning machineLearning =
createTrialLicensedMachineLearning(settings)) { - MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, true, false)); - machineLearning.loadExtensions(loader); + MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, true, false, false)); + try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class)))); @@ -263,9 +261,8 @@ public void testNlpOnly() throws IOException { Settings settings = Settings.builder().put("path.home", createTempDir()).build(); - try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings)) { - MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, false, true)); - machineLearning.loadExtensions(loader); + MlTestExtensionLoader loader = new MlTestExtensionLoader(new MlTestExtension(false, false, false, false, true, false)); + try (MachineLearning machineLearning = createTrialLicensedMachineLearning(settings, loader)) { List<RestHandler> restHandlers = machineLearning.getRestHandlers(settings, null, null, null, null, null, null); assertThat(restHandlers, hasItem(instanceOf(RestMlInfoAction.class))); assertThat(restHandlers, not(hasItem(instanceOf(RestGetJobsAction.class)))); @@ -291,19 +288,22 @@ public static class MlTestExtension implements MachineLearningExtension { private final boolean isAnomalyDetectionEnabled; private final boolean isDataFrameAnalyticsEnabled; private final boolean isNlpEnabled; + private final boolean isLearningToRankEnabled; MlTestExtension( boolean useIlm, boolean includeNodeInfo, boolean isAnomalyDetectionEnabled, boolean isDataFrameAnalyticsEnabled, - boolean isNlpEnabled + boolean isNlpEnabled, + boolean isLearningToRankEnabled ) { this.useIlm = useIlm; this.includeNodeInfo = includeNodeInfo; this.isAnomalyDetectionEnabled = isAnomalyDetectionEnabled; this.isDataFrameAnalyticsEnabled = isDataFrameAnalyticsEnabled; this.isNlpEnabled = isNlpEnabled; + this.isLearningToRankEnabled = isLearningToRankEnabled; } @Override @@ -331,6 +331,11 @@ public boolean isNlpEnabled() { return isNlpEnabled; } + @Override + public boolean isLearningToRankEnabled() { + return isLearningToRankEnabled; + } + @Override public String[] getAnalyticsDestIndexAllowedSettings() { return ANALYTICS_DEST_INDEX_ALLOWED_SETTINGS; @@ -377,6 +382,12 @@ protected XPackLicenseState getLicenseState() { } public static MachineLearning createTrialLicensedMachineLearning(Settings settings) { - return new TrialLicensedMachineLearning(settings); + return createTrialLicensedMachineLearning(settings, null); + } + + public static MachineLearning createTrialLicensedMachineLearning(Settings settings, MlTestExtensionLoader loader) { + MachineLearning mlPlugin = new TrialLicensedMachineLearning(settings); + mlPlugin.loadExtensions(loader); + return mlPlugin; + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java index a7a9122c96606..2b206de4cf42f 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlLifeCycleServiceTests.java @@ -28,6 +28,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.core.ml.job.config.JobTaskState; import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeState; @@ -35,7 +36,6 @@ import org.elasticsearch.xpack.core.ml.job.snapshot.upgrade.SnapshotUpgradeTaskState; import org.elasticsearch.xpack.ml.datafeed.DatafeedRunner; import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.process.autodetect.AutodetectProcessManager; import org.elasticsearch.xpack.ml.process.MlController; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java new file mode 100644 index 0000000000000..2262c21070e75 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/MlMetricsTests.java @@ -0,0 +1,183 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml; + +import org.elasticsearch.client.internal.node.NodeClient; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.persistent.PersistentTasksCustomMetadata; +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.xpack.core.ml.MlTasks; +import org.elasticsearch.xpack.core.ml.action.StartDatafeedAction; +import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; +import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsConfig; +import org.elasticsearch.xpack.core.ml.dataframe.DataFrameAnalyticsState; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; +import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; +import org.elasticsearch.xpack.core.ml.job.config.JobState; +import org.elasticsearch.xpack.ml.autoscaling.MlMemoryAutoscalingDeciderTests; +import org.elasticsearch.xpack.ml.dataframe.DataFrameAnalyticsManager; +import org.elasticsearch.xpack.ml.dataframe.persistence.DataFrameAnalyticsConfigProvider; +import org.elasticsearch.xpack.ml.dataframe.process.AnalyticsProcessManager; +import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; +import org.elasticsearch.xpack.ml.notifications.DataFrameAnalyticsAuditor; +import 
org.elasticsearch.xpack.ml.utils.persistence.ResultsPersisterService; + +import java.util.Map; + +import static org.elasticsearch.xpack.ml.MachineLearning.NATIVE_EXECUTABLE_CODE_OVERHEAD; +import static org.elasticsearch.xpack.ml.job.JobNodeSelector.AWAITING_LAZY_ASSIGNMENT; +import static org.hamcrest.Matchers.is; +import static org.mockito.Mockito.mock; + +public class MlMetricsTests extends ESTestCase { + + public void testFindTaskStatuses() { + + PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); + MlMemoryAutoscalingDeciderTests.addJobTask("job1", "node1", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job2", "node1", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job3", "node2", JobState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job4", null, JobState.OPENING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job5", "node1", JobState.CLOSING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job6", "node2", JobState.OPENED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addJobTask("job7", "node2", JobState.OPENING, tasksBuilder); + addDatafeedTask("datafeed1", "node1", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed2", "node1", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed5", "node1", DatafeedState.STOPPING, tasksBuilder); + addDatafeedTask("datafeed6", "node2", DatafeedState.STARTED, tasksBuilder); + addDatafeedTask("datafeed7", "node2", DatafeedState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa1", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa2", "node2", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa3", "node1", DataFrameAnalyticsState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa4", "node2", DataFrameAnalyticsState.REINDEXING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa5", null, DataFrameAnalyticsState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa6", "node1", DataFrameAnalyticsState.ANALYZING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa7", "node1", DataFrameAnalyticsState.STOPPING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa8", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa9", null, DataFrameAnalyticsState.FAILED, tasksBuilder); + + MlMetrics.MlTaskStatusCounts counts = MlMetrics.findTaskStatuses(tasksBuilder.build()); + assertThat(counts.adOpeningCount(), is(2)); + assertThat(counts.adOpenedCount(), is(3)); + assertThat(counts.adClosingCount(), is(1)); + assertThat(counts.adFailedCount(), is(1)); + assertThat(counts.datafeedStartingCount(), is(1)); + assertThat(counts.datafeedStartedCount(), is(3)); + assertThat(counts.datafeedStoppingCount(), is(1)); + assertThat(counts.dfaStartingCount(), is(1)); + assertThat(counts.dfaStartedCount(), is(3)); + assertThat(counts.dfaReindexingCount(), is(1)); + assertThat(counts.dfaAnalyzingCount(), is(1)); + assertThat(counts.dfaStoppingCount(), is(1)); + assertThat(counts.dfaFailedCount(), is(2)); + } + + public void testFindDfaMemoryUsage() { + + PersistentTasksCustomMetadata.Builder tasksBuilder = PersistentTasksCustomMetadata.builder(); + 
MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa1", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa2", "node2", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa3", "node1", DataFrameAnalyticsState.FAILED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa4", "node2", DataFrameAnalyticsState.REINDEXING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa5", null, DataFrameAnalyticsState.STARTING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa6", "node1", DataFrameAnalyticsState.ANALYZING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa7", "node1", DataFrameAnalyticsState.STOPPING, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa8", "node1", DataFrameAnalyticsState.STARTED, tasksBuilder); + MlMemoryAutoscalingDeciderTests.addAnalyticsTask("dfa9", null, DataFrameAnalyticsState.FAILED, tasksBuilder); + + DataFrameAnalyticsManager dfaManager = new DataFrameAnalyticsManager( + Settings.EMPTY, + mock(NodeClient.class), + mock(ThreadPool.class), + mock(ClusterService.class), + mock(DataFrameAnalyticsConfigProvider.class), + mock(AnalyticsProcessManager.class), + mock(DataFrameAnalyticsAuditor.class), + mock(IndexNameExpressionResolver.class), + mock(ResultsPersisterService.class), + mock(ModelLoadingService.class), + new String[] {}, + Map.of( + "dfa1", + ByteSizeValue.ofGb(1), + "dfa3", + ByteSizeValue.ofGb(2), + "dfa6", + ByteSizeValue.ofGb(4), + "dfa7", + ByteSizeValue.ofGb(8), + "dfa8", + ByteSizeValue.ofGb(16) + ) + ); + + long bytesUsed = MlMetrics.findDfaMemoryUsage(dfaManager, tasksBuilder.build()); + assertThat(bytesUsed, is(ByteSizeValue.ofGb(29).getBytes() + 4 * DataFrameAnalyticsConfig.PROCESS_MEMORY_OVERHEAD.getBytes())); + } + + public void testFindTrainedModelAllocationCounts() { + + TrainedModelAssignmentMetadata.Builder metadataBuilder = TrainedModelAssignmentMetadata.Builder.empty(); + metadataBuilder.addNewAssignment( + "model1", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node1", new RoutingInfo(1, 1, RoutingState.STARTED, "")) + .addRoutingEntry("node2", new RoutingInfo(0, 1, RoutingState.FAILED, "")) + ); + metadataBuilder.addNewAssignment( + "model2", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node1", new RoutingInfo(2, 2, RoutingState.STARTED, "")) + ); + metadataBuilder.addNewAssignment( + "model3", + TrainedModelAssignment.Builder.empty(mock(StartTrainedModelDeploymentAction.TaskParams.class)) + .addRoutingEntry("node2", new RoutingInfo(0, 1, RoutingState.STARTING, "")) + ); + + MlMetrics.TrainedModelAllocationCounts counts = MlMetrics.findTrainedModelAllocationCounts(metadataBuilder.build()); + assertThat(counts.trainedModelsTargetAllocations(), is(5)); + assertThat(counts.trainedModelsCurrentAllocations(), is(3)); + assertThat(counts.trainedModelsFailedAllocations(), is(1)); + } + + public void testFindNativeMemoryFree() { + + long bytesFree = MlMetrics.findNativeMemoryFree( + ByteSizeValue.ofMb(4000).getBytes(), + ByteSizeValue.ofMb(500).getBytes(), + ByteSizeValue.ofMb(1000).getBytes(), + ByteSizeValue.ofMb(2000).getBytes() + ); + assertThat(bytesFree, is(ByteSizeValue.ofMb(500).getBytes() - NATIVE_EXECUTABLE_CODE_OVERHEAD.getBytes())); + }
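/* [Editor's note, not part of the change: the arithmetic behind the last two tests above.
   Allocation counts: model1 targets 1 + 1 allocations (1 currently started on node1; the
   node2 route targets 1 but is FAILED), model2 targets 2 (2 started) and model3 targets 1
   (0 started, STARTING), giving 5 target, 3 current and 1 failed allocation.
   Native memory: findNativeMemoryFree subtracts the three usage figures from the limit,
   4000 MB - 500 MB (anomaly detection) - 1000 MB (DFA) - 2000 MB (trained models) = 500 MB,
   then reserves NATIVE_EXECUTABLE_CODE_OVERHEAD for the native process code itself.] */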
+ + public static void addDatafeedTask( + String datafeedId, + String nodeId, + DatafeedState datafeedState, + PersistentTasksCustomMetadata.Builder builder + ) { + builder.addTask( + MlTasks.datafeedTaskId(datafeedId), + MlTasks.DATAFEED_TASK_NAME, + new StartDatafeedAction.DatafeedParams(datafeedId, System.currentTimeMillis()), + nodeId == null ? AWAITING_LAZY_ASSIGNMENT : new PersistentTasksCustomMetadata.Assignment(nodeId, "test assignment") + ); + if (datafeedState != null) { + builder.updateTaskState(MlTasks.datafeedTaskId(datafeedId), datafeedState); + } + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java index c54ac8ba3b84d..bf6d13ada0f94 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportGetTrainedModelsStatsActionTests.java @@ -28,7 +28,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ml.MachineLearningField; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.junit.Before; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java index 196584e4a7ce2..cb5b98af29d57 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/CategorizeTextAggregatorTests.java @@ -30,6 +30,7 @@ import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.aggregations.metrics.MinAggregationBuilder; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.io.IOException; import java.util.Arrays; @@ -53,7 +54,7 @@ protected AnalysisModule createAnalysisModule() throws Exception { @Override protected List<SearchPlugin> getSearchPlugins() { - return List.of(new MachineLearning(Settings.EMPTY)); + return List.of(MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY)); } private static final String TEXT_FIELD_NAME = "text"; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java index 1f9119463994d..be8b72d26fd71 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/categorization/InternalCategorizationAggregationTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xcontent.ParseField; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import org.junit.After; import org.junit.Before; @@ -49,7 +49,7 @@ public void
destroyHash() { @Override protected SearchPlugin registerPlugin() { - return new MachineLearning(Settings.EMPTY); + return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java index 410fc474a503f..442641db8c4ed 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/changepoint/ChangePointAggregatorTests.java @@ -23,7 +23,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFilter; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.io.IOException; import java.util.Arrays; @@ -40,7 +40,7 @@ public class ChangePointAggregatorTests extends AggregatorTestCase { @Override protected List<SearchPlugin> getSearchPlugins() { - return List.of(new MachineLearning(Settings.EMPTY)); + return List.of(MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY)); } private static final DateHistogramInterval INTERVAL = DateHistogramInterval.minutes(1); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregatorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregatorTests.java index 5a3c8fcc5f7c7..93ef45285b60e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregatorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/FrequentItemSetsAggregatorTests.java @@ -32,7 +32,7 @@ import org.elasticsearch.search.aggregations.support.CoreValuesSourceType; import org.elasticsearch.search.aggregations.support.MultiValuesSourceFieldConfig; import org.elasticsearch.search.aggregations.support.ValuesSourceType; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.EclatMapReducer.EclatResult; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.FrequentItemSetCollector.FrequentItemSet; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.InternalItemSetMapReduceAggregation; @@ -66,7 +66,7 @@ public class FrequentItemSetsAggregatorTests extends AggregatorTestCase { @Override protected List<SearchPlugin> getSearchPlugins() { - return List.of(new MachineLearning(Settings.EMPTY)); + return List.of(MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY)); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java index 93ee8bec974a7..ccaa6b4f0f4ec 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java +++ 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/frequentitemsets/mr/InternalItemSetMapReduceAggregationTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentParser; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.InternalItemSetMapReduceAggregationTests.WordCountMapReducer.WordCounts; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.ItemSetMapReduceValueSource.Field; import org.elasticsearch.xpack.ml.aggs.frequentitemsets.mr.ItemSetMapReduceValueSource.ValueFormatter; @@ -247,7 +247,7 @@ protected void assertFromXContent( @Override protected SearchPlugin registerPlugin() { - return new MachineLearning(Settings.EMPTY); + return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/heuristic/PValueScoreTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/heuristic/PValueScoreTests.java index 17898d7205b66..a6de69b684e12 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/heuristic/PValueScoreTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/heuristic/PValueScoreTests.java @@ -17,6 +17,7 @@ import org.elasticsearch.search.aggregations.bucket.terms.heuristic.SignificanceHeuristic; import org.elasticsearch.xcontent.NamedXContentRegistry; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.util.List; import java.util.function.Function; @@ -56,16 +57,14 @@ public void testAssertions() { @Override protected NamedXContentRegistry xContentRegistry() { - return new NamedXContentRegistry( - new SearchModule(Settings.EMPTY, List.of(new MachineLearning(Settings.EMPTY))).getNamedXContents() - ); + MachineLearning mlPlugin = MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); + return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, List.of(mlPlugin)).getNamedXContents()); } @Override protected NamedWriteableRegistry writableRegistry() { - return new NamedWriteableRegistry( - new SearchModule(Settings.EMPTY, List.of(new MachineLearning(Settings.EMPTY))).getNamedWriteables() - ); + MachineLearning mlPlugin = MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, List.of(mlPlugin)).getNamedWriteables()); } public void testPValueScore_WhenAllDocsContainTerm() { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilderTests.java index 4aed2261b89ef..8f1ef07b554da 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InferencePipelineAggregationBuilderTests.java @@ -23,10 +23,9 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfigUpdate; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.RegressionConfigUpdateTests; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ResultsFieldUpdate; -import 
org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import org.elasticsearch.xpack.ml.inference.loadingservice.ModelLoadingService; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.function.Function; @@ -43,7 +42,7 @@ public class InferencePipelineAggregationBuilderTests extends BasePipelineAggreg @Override protected List<SearchPlugin> plugins() { - return Collections.singletonList(new MachineLearning(Settings.EMPTY)); + return List.of(MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY)); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java index 016a89fe4e4b7..fc1858167e7d7 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/inference/InternalInferenceAggregationTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.core.ml.inference.results.TopClassEntry; import org.elasticsearch.xpack.core.ml.inference.results.WarningInferenceResults; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.util.Arrays; import java.util.Collections; @@ -40,7 +40,7 @@ public class InternalInferenceAggregationTests extends InternalAggregationTestCa @Override protected SearchPlugin registerPlugin() { - return new MachineLearning(Settings.EMPTY); + return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java index 2085e44925cc9..f064b37c1fdec 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/aggs/kstest/InternalKSTestAggregationTests.java @@ -14,7 +14,7 @@ import org.elasticsearch.search.aggregations.ParsedAggregation; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xcontent.NamedXContentRegistry; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.util.Arrays; import java.util.List; @@ -28,7 +28,7 @@ public class InternalKSTestAggregationTests extends InternalAggregationTestCase< @Override protected SearchPlugin registerPlugin() { - return new MachineLearning(Settings.EMPTY); + return MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY); } @Override diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java index a56ad515690cf..97fd66e284010 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/autoscaling/MlProcessorAutoscalingDeciderTests.java @@ -25,8 +25,8 @@ import
org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.junit.Before; import java.util.Map; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java index 0a54b97cf2f2a..f08d2735be8a5 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentClusterServiceTests.java @@ -58,6 +58,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingStateAndReason; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.autoscaling.NodeAvailabilityZoneMapper; @@ -114,7 +115,7 @@ public class TrainedModelAssignmentClusterServiceTests extends ESTestCase { private NodeAvailabilityZoneMapper nodeAvailabilityZoneMapper; private Client client; private static MockAppender appender; - private static Logger testLogger1 = LogManager.getLogger(TrainedModelAssignmentClusterService.class); + private static final Logger testLogger1 = LogManager.getLogger(TrainedModelAssignmentClusterService.class); @Before public void setupObjects() throws IllegalAccessException { @@ -546,6 +547,43 @@ public void testCreateAssignmentWhileResetModeIsTrue() throws InterruptedExcepti latch.await(); } + public void testHaveMlNodesChanged_ReturnsFalseWhenPreviouslyShuttingDownNode_IsMarkedAsReturning_ButIsNotAPresentNode() { + String model1 = "model-1"; + String shuttingDownNode = "ml-shutting-down-node"; + String mlNode1 = "ml-node-with-room"; + + ClusterState stateWithShuttingDownNodeAndMlNode1 = createClusterState( + List.of(shuttingDownNode, mlNode1), + Metadata.builder() + .putCustom( + TrainedModelAssignmentMetadata.NAME, + TrainedModelAssignmentMetadata.Builder.empty() + .addNewAssignment( + model1, + TrainedModelAssignment.Builder.empty(newParams(model1, 100)) + .addRoutingEntry(mlNode1, new RoutingInfo(1, 1, RoutingState.STARTING, "")) + ) + .build() + ) + .putCustom(NodesShutdownMetadata.TYPE, shutdownMetadata(shuttingDownNode)) + .build() + ); + + ClusterState stateWithMlNode1 = ClusterState.builder(stateWithShuttingDownNodeAndMlNode1) + .nodes(DiscoveryNodes.builder(stateWithShuttingDownNodeAndMlNode1.nodes()).remove(shuttingDownNode).build()) + .metadata( + Metadata.builder(stateWithShuttingDownNodeAndMlNode1.metadata()) + .putCustom(NodesShutdownMetadata.TYPE, NodesShutdownMetadata.EMPTY) + .build() + ) + .build(); + + var shutdownEvent = new ClusterChangedEvent("test", stateWithMlNode1, stateWithShuttingDownNodeAndMlNode1); + var metadata = 
TrainedModelAssignmentMetadata.fromState(shutdownEvent.state()); + + assertFalse(TrainedModelAssignmentClusterService.haveMlNodesChanged(shutdownEvent, metadata)); + } + public void testHaveMlNodesChanged_ReturnsTrueWhenNodeShutsDownAndWasRoutedTo() { String model1 = "model-1"; String mlNode1 = "ml-node-with-room"; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java index 3057da83d11e9..6c5223eae4d99 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentMetadataTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.core.ml.action.StartTrainedModelDeploymentAction; import org.elasticsearch.xpack.core.ml.inference.assignment.Priority; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentTests; import java.io.IOException; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java index 795f184a49a4d..2444134ce2920 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentNodeServiceTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.inference.deployment.DeploymentManager; import org.elasticsearch.xpack.ml.inference.deployment.TrainedModelDeploymentTask; import org.junit.After; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java index 334fdfbb8b922..53b737b38c284 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/assignment/TrainedModelAssignmentRebalancerTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.MachineLearning; import org.elasticsearch.xpack.ml.job.NodeLoad; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java 
b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java index 2f4640cfa38dc..40b0dd519f7d8 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/inference/loadingservice/ModelLoadingServiceTests.java @@ -39,6 +39,7 @@ import org.elasticsearch.xcontent.XContentFactory; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.ml.action.GetTrainedModelsAction; +import org.elasticsearch.xpack.core.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.core.ml.inference.TrainedModelConfig; import org.elasticsearch.xpack.core.ml.inference.TrainedModelInput; import org.elasticsearch.xpack.core.ml.inference.trainedmodel.ClassificationConfig; @@ -46,7 +47,6 @@ import org.elasticsearch.xpack.core.ml.inference.trainedmodel.inference.InferenceDefinition; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.ModelAliasMetadata; import org.elasticsearch.xpack.ml.inference.TrainedModelStatsService; import org.elasticsearch.xpack.ml.inference.ingest.InferenceProcessor; import org.elasticsearch.xpack.ml.inference.persistence.TrainedModelProvider; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java index ccc7f14d2264e..fef9b07429702 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/NodeLoadDetectorTests.java @@ -21,9 +21,9 @@ import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingInfo; import org.elasticsearch.xpack.core.ml.inference.assignment.RoutingState; import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignment; +import org.elasticsearch.xpack.core.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.core.ml.job.config.JobState; import org.elasticsearch.xpack.ml.MachineLearning; -import org.elasticsearch.xpack.ml.inference.assignment.TrainedModelAssignmentMetadata; import org.elasticsearch.xpack.ml.job.task.OpenJobPersistentTasksExecutorTests; import org.elasticsearch.xpack.ml.process.MlMemoryTracker; import org.junit.Before; diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java index aff3f006b1a8a..7a314b82024be 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/process/autodetect/AutodetectProcessManagerTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.common.util.concurrent.ThreadContext; @@ -53,6 +54,7 @@ import 
org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.DataCounts; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats; +import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSizeStats.AssignmentMemoryBasis; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.Quantiles; import org.elasticsearch.xpack.ml.MachineLearning; @@ -815,6 +817,35 @@ public void testCreate_givenNonZeroCountsAndNoModelSnapshotNorQuantiles() { verifyNoMoreInteractions(auditor); } + public void testGetOpenProcessMemoryUsage() { + modelSnapshot = null; + quantiles = null; + dataCounts = new DataCounts("foo"); + dataCounts.setLatestRecordTimeStamp(new Date(0L)); + dataCounts.incrementProcessedRecordCount(42L); + long modelMemoryLimitBytes = ByteSizeValue.ofMb(randomIntBetween(10, 1000)).getBytes(); + long peakModelBytes = randomLongBetween(100000, modelMemoryLimitBytes - 1); + long modelBytes = randomLongBetween(1, peakModelBytes - 1); + AssignmentMemoryBasis assignmentMemoryBasis = randomFrom(AssignmentMemoryBasis.values()); + modelSizeStats = new ModelSizeStats.Builder("foo").setModelBytesMemoryLimit(modelMemoryLimitBytes) + .setPeakModelBytes(peakModelBytes) + .setModelBytes(modelBytes) + .setAssignmentMemoryBasis(assignmentMemoryBasis) + .build(); + when(autodetectCommunicator.getModelSizeStats()).thenReturn(modelSizeStats); + AutodetectProcessManager manager = createSpyManager(); + JobTask jobTask = mock(JobTask.class); + when(jobTask.getJobId()).thenReturn("foo"); + manager.openJob(jobTask, clusterState, DEFAULT_MASTER_NODE_TIMEOUT, (e, b) -> {}); + + long expectedSizeBytes = Job.PROCESS_MEMORY_OVERHEAD.getBytes() + switch (assignmentMemoryBasis) { + case MODEL_MEMORY_LIMIT -> modelMemoryLimitBytes; + case CURRENT_MODEL_BYTES -> modelBytes; + case PEAK_MODEL_BYTES -> peakModelBytes; + }; + assertThat(manager.getOpenProcessMemoryUsage(), equalTo(ByteSizeValue.ofBytes(expectedSizeBytes))); + } + private AutodetectProcessManager createNonSpyManager(String jobId) { ExecutorService executorService = mock(ExecutorService.class); when(threadPool.executor(anyString())).thenReturn(executorService); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java index 5e414a7f997d5..13f12f3cdc1e1 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TextExpansionQueryBuilderTests.java @@ -7,10 +7,15 @@ package org.elasticsearch.xpack.ml.queries; +import org.apache.lucene.document.Document; import org.apache.lucene.document.FeatureField; +import org.apache.lucene.document.FloatDocValuesField; +import org.apache.lucene.index.IndexReader; import org.apache.lucene.search.BooleanClause; import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionType; @@ -20,6 +25,7 @@ import org.elasticsearch.common.compress.CompressedXContent; import 
org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; +import org.elasticsearch.index.query.BoolQueryBuilder; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.SearchExecutionContext; import org.elasticsearch.plugins.Plugin; @@ -36,6 +42,7 @@ import java.util.Collection; import java.util.List; +import static org.elasticsearch.xpack.ml.queries.WeightedTokensQueryBuilder.TOKENS_FIELD; import static org.hamcrest.CoreMatchers.instanceOf; import static org.hamcrest.Matchers.either; import static org.hamcrest.Matchers.hasSize; @@ -43,11 +50,19 @@ public class TextExpansionQueryBuilderTests extends AbstractQueryTestCase<TextExpansionQueryBuilder> { private static final String RANK_FEATURES_FIELD = "rank"; - private static int NUM_TOKENS = 10; + private static final int NUM_TOKENS = 10; @Override protected TextExpansionQueryBuilder doCreateTestQueryBuilder() { - var builder = new TextExpansionQueryBuilder(RANK_FEATURES_FIELD, randomAlphaOfLength(4), randomAlphaOfLength(4)); + TokenPruningConfig tokenPruningConfig = randomBoolean() + ? new TokenPruningConfig(randomIntBetween(1, 100), randomFloat(), randomBoolean()) + : null; + var builder = new TextExpansionQueryBuilder( + RANK_FEATURES_FIELD, + randomAlphaOfLength(4), + randomAlphaOfLength(4), + tokenPruningConfig + ); if (randomBoolean()) { builder.boost((float) randomDoubleBetween(0.1, 10.0, true)); } @@ -126,6 +141,44 @@ protected void doAssertLuceneQuery(TextExpansionQueryBuilder queryBuilder, Query } } + /** + * Overridden to ensure that {@link SearchExecutionContext} has a non-null {@link IndexReader} + */ + @Override + public void testCacheability() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new FloatDocValuesField(RANK_FEATURES_FIELD, 1.0f)); + iw.addDocument(document); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + TextExpansionQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new SearchExecutionContext(context)); + + assertNotNull(rewriteQuery.toQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + } + } + } + + /** + * Overridden to ensure that {@link SearchExecutionContext} has a non-null {@link IndexReader}; this query should always be rewritten + */ + @Override + public void testToQuery() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new FloatDocValuesField(RANK_FEATURES_FIELD, 1.0f)); + iw.addDocument(document); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + TextExpansionQueryBuilder queryBuilder = createTestQueryBuilder(); + IllegalStateException e = expectThrows(IllegalStateException.class, () -> queryBuilder.toQuery(context)); + assertEquals("text_expansion should have been rewritten to another query type", e.getMessage()); + } + } + } + public void testIllegalValues() { { IllegalArgumentException e = expectThrows( @@ -162,4 +215,55 @@ public void testToXContent() throws IOException { } }""", query); } + + public void testToXContentWithThresholds() throws IOException { + QueryBuilder query =
new TextExpansionQueryBuilder("foo", "bar", "baz", new TokenPruningConfig(4, 0.3f, false)); + checkGeneratedJson(""" + { + "text_expansion": { + "foo": { + "model_text": "bar", + "model_id": "baz", + "pruning_config": { + "tokens_freq_ratio_threshold": 4.0, + "tokens_weight_threshold": 0.3 + } + } + } + }""", query); + } + + public void testToXContentWithThresholdsAndOnlyScorePrunedTokens() throws IOException { + QueryBuilder query = new TextExpansionQueryBuilder("foo", "bar", "baz", new TokenPruningConfig(4, 0.3f, true)); + checkGeneratedJson(""" + { + "text_expansion": { + "foo": { + "model_text": "bar", + "model_id": "baz", + "pruning_config": { + "tokens_freq_ratio_threshold": 4.0, + "tokens_weight_threshold": 0.3, + "only_score_pruned_tokens": true + } + } + } + }""", query); + } + + @Override + protected String[] shuffleProtectedFields() { + return new String[] { TOKENS_FIELD.getPreferredName() }; + } + + public void testThatTokensAreCorrectlyPruned() { + SearchExecutionContext searchExecutionContext = createSearchExecutionContext(); + TextExpansionQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryBuilder rewrittenQueryBuilder = rewriteAndFetch(queryBuilder, searchExecutionContext); + if (queryBuilder.getTokenPruningConfig() == null) { + assertTrue(rewrittenQueryBuilder instanceof BoolQueryBuilder); + } else { + assertTrue(rewrittenQueryBuilder instanceof WeightedTokensQueryBuilder); + } + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfigTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfigTests.java new file mode 100644 index 0000000000000..3f38a2ee891d5 --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/TokenPruningConfigTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.queries; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.test.AbstractXContentSerializingTestCase; +import org.elasticsearch.xcontent.XContentParser; + +import java.io.IOException; + +public class TokenPruningConfigTests extends AbstractXContentSerializingTestCase { + + public static TokenPruningConfig testInstance() { + return new TokenPruningConfig(randomIntBetween(1, 100), randomFloat(), randomBoolean()); + } + + @Override + protected Writeable.Reader instanceReader() { + return TokenPruningConfig::new; + } + + @Override + protected TokenPruningConfig createTestInstance() { + return testInstance(); + } + + @Override + protected TokenPruningConfig mutateInstance(TokenPruningConfig instance) throws IOException { + return null; + } + + @Override + protected TokenPruningConfig doParseInstance(XContentParser parser) throws IOException { + return TokenPruningConfig.fromXContent(parser); + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilderTests.java new file mode 100644 index 0000000000000..4d91c66de4b9e --- /dev/null +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/queries/WeightedTokensQueryBuilderTests.java @@ -0,0 +1,439 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. 
under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.ml.queries; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.FeatureField; +import org.apache.lucene.document.FloatDocValuesField; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.search.BooleanClause; +import org.apache.lucene.search.BooleanQuery; +import org.apache.lucene.search.BoostQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.apache.lucene.tests.index.RandomIndexWriter; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionType; +import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.client.internal.Client; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.compress.CompressedXContent; +import org.elasticsearch.index.mapper.MapperService; +import org.elasticsearch.index.mapper.extras.MapperExtrasPlugin; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.SearchExecutionContext; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.test.AbstractQueryTestCase; +import org.elasticsearch.xpack.core.ml.action.InferModelAction; +import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; +import org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults; +import org.elasticsearch.xpack.ml.MachineLearning; + +import java.io.IOException; +import java.lang.reflect.Method; +import java.util.Collection; +import java.util.List; + +import static org.elasticsearch.xpack.core.ml.inference.results.TextExpansionResults.WeightedToken; +import static org.elasticsearch.xpack.ml.queries.WeightedTokensQueryBuilder.TOKENS_FIELD; +import static org.hamcrest.CoreMatchers.equalTo; +import static org.hamcrest.CoreMatchers.instanceOf; +import static org.hamcrest.Matchers.either; +import static org.hamcrest.Matchers.hasSize; + +public class WeightedTokensQueryBuilderTests extends AbstractQueryTestCase { + + private static final String RANK_FEATURES_FIELD = "rank"; + private static final List WEIGHTED_TOKENS = List.of(new TextExpansionResults.WeightedToken("foo", .42f)); + private static final int NUM_TOKENS = WEIGHTED_TOKENS.size(); + + @Override + protected WeightedTokensQueryBuilder doCreateTestQueryBuilder() { + return createTestQueryBuilder(randomBoolean()); + } + + private WeightedTokensQueryBuilder createTestQueryBuilder(boolean onlyScorePrunedTokens) { + TokenPruningConfig tokenPruningConfig = randomBoolean() + ? 
new TokenPruningConfig(randomIntBetween(1, 100), randomFloat(), onlyScorePrunedTokens) + : null; + + var builder = new WeightedTokensQueryBuilder(RANK_FEATURES_FIELD, WEIGHTED_TOKENS, tokenPruningConfig); + if (randomBoolean()) { + builder.boost((float) randomDoubleBetween(0.1, 10.0, true)); + } + if (randomBoolean()) { + builder.queryName(randomAlphaOfLength(4)); + } + return builder; + } + + @Override + protected Collection> getPlugins() { + return List.of(MachineLearning.class, MapperExtrasPlugin.class); + } + + @Override + protected boolean canSimulateMethod(Method method, Object[] args) throws NoSuchMethodException { + return method.equals(Client.class.getMethod("execute", ActionType.class, ActionRequest.class, ActionListener.class)) + && (args[0] instanceof InferModelAction); + } + + @Override + protected Object simulateMethod(Method method, Object[] args) { + InferModelAction.Request request = (InferModelAction.Request) args[1]; + assertEquals(InferModelAction.Request.DEFAULT_TIMEOUT_FOR_API, request.getInferenceTimeout()); + assertEquals(TrainedModelPrefixStrings.PrefixType.SEARCH, request.getPrefixType()); + + // Randomisation of tokens cannot be used here as {@code #doAssertLuceneQuery} + // asserts that 2 rewritten queries are the same + var response = InferModelAction.Response.builder() + .setId(request.getId()) + .addInferenceResults(List.of(new TextExpansionResults("foo", WEIGHTED_TOKENS.stream().toList(), randomBoolean()))) + .build(); + @SuppressWarnings("unchecked") // We matched the method above. + ActionListener listener = (ActionListener) args[2]; + listener.onResponse(response); + return null; + } + + @Override + protected void initializeAdditionalMappings(MapperService mapperService) throws IOException { + mapperService.merge( + "_doc", + new CompressedXContent(Strings.toString(PutMappingRequest.simpleMapping(RANK_FEATURES_FIELD, "type=rank_features"))), + MapperService.MergeReason.MAPPING_UPDATE + ); + } + + /** + * Overridden to ensure that {@link SearchExecutionContext} has a non-null {@link IndexReader} + */ + @Override + public void testToQuery() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + // Index at least one document so we have a freq > 0 + Document document = new Document(); + document.add(new FeatureField(RANK_FEATURES_FIELD, "foo", 1.0f)); + iw.addDocument(document); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + // We need to force token pruning config here, to get repeatable lucene queries for comparison + WeightedTokensQueryBuilder firstQuery = createTestQueryBuilder(false); + WeightedTokensQueryBuilder controlQuery = copyQuery(firstQuery); + QueryBuilder rewritten = rewriteQuery(firstQuery, context); + Query firstLuceneQuery = rewritten.toQuery(context); + assertNotNull("toQuery should not return null", firstLuceneQuery); + assertLuceneQuery(firstQuery, firstLuceneQuery, context); + assertEquals( + "query is not equal to its copy after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, + firstQuery, + controlQuery + ); + assertEquals( + "equals is not symmetric after calling toQuery, firstQuery: " + firstQuery + ", secondQuery: " + controlQuery, + controlQuery, + firstQuery + ); + assertThat( + "query copy's hashcode is different from original hashcode after calling toQuery, firstQuery: " + + firstQuery + + ", secondQuery: " + + controlQuery, + 
controlQuery.hashCode(), + equalTo(firstQuery.hashCode()) + ); + WeightedTokensQueryBuilder secondQuery = copyQuery(firstQuery); + + // query _name never should affect the result of toQuery, we randomly set it to make sure + if (randomBoolean()) { + secondQuery.queryName( + secondQuery.queryName() == null + ? randomAlphaOfLengthBetween(1, 30) + : secondQuery.queryName() + randomAlphaOfLengthBetween(1, 10) + ); + } + context = new SearchExecutionContext(context); + Query secondLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); + assertNotNull("toQuery should not return null", secondLuceneQuery); + assertLuceneQuery(secondQuery, secondLuceneQuery, context); + + if (builderGeneratesCacheableQueries()) { + assertEquals( + "two equivalent query builders lead to different lucene queries hashcode", + secondLuceneQuery.hashCode(), + firstLuceneQuery.hashCode() + ); + assertEquals( + "two equivalent query builders lead to different lucene queries", + rewrite(secondLuceneQuery), + rewrite(firstLuceneQuery) + ); + } + + if (supportsBoost() && firstLuceneQuery instanceof MatchNoDocsQuery == false) { + secondQuery.boost(firstQuery.boost() + 1f + randomFloat()); + Query thirdLuceneQuery = rewriteQuery(secondQuery, context).toQuery(context); + assertNotEquals( + "modifying the boost doesn't affect the corresponding lucene query", + rewrite(firstLuceneQuery), + rewrite(thirdLuceneQuery) + ); + } + + } + } + } + + public void testPruningIsAppliedCorrectly() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + List documents = List.of( + createDocument( + List.of("the", "quick", "brown", "fox", "jumped", "over", "lazy", "dog", "me"), + List.of(.2f, 1.8f, 1.75f, 5.9f, 1.6f, 1.4f, .4f, 4.8f, 2.1f) + ), + createDocument( + List.of("the", "rains", "in", "spain", "fall", "mainly", "on", "plain", "me"), + List.of(.1f, 3.6f, .1f, 4.8f, .6f, .3f, .1f, 2.6f, 2.1f) + ), + createDocument( + List.of("betty", "bought", "butter", "but", "the", "was", "bitter", "me"), + List.of(6.8f, 1.4f, .5f, 3.2f, .1f, 3.2f, .6f, 2.1f) + ), + createDocument( + List.of("she", "sells", "seashells", "by", "the", "seashore", "me"), + List.of(.2f, 1.4f, 5.9f, .1f, .1f, 3.6f, 2.1f) + ) + ); + iw.addDocuments(documents); + + List inputTokens = List.of( + new WeightedToken("the", .1f), // Will be pruned - score too low, freq too high + new WeightedToken("black", 5.3f), // Will be pruned - does not exist in index + new WeightedToken("dog", 7.5f), // Will be kept - high score and low freq + new WeightedToken("jumped", 4.5f), // Will be kept - high score and low freq + new WeightedToken("on", .1f), // Will be kept - low score but also low freq + new WeightedToken("me", 3.8f) // Will be kept - high freq but also high score + ); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + + WeightedTokensQueryBuilder noPruningQuery = new WeightedTokensQueryBuilder(RANK_FEATURES_FIELD, inputTokens, null); + Query query = noPruningQuery.doToQuery(context); + assertCorrectLuceneQuery("noPruningQuery", query, List.of("the", "black", "dog", "jumped", "on", "me")); + + WeightedTokensQueryBuilder queryThatShouldBePruned = new WeightedTokensQueryBuilder( + RANK_FEATURES_FIELD, + inputTokens, + new TokenPruningConfig(1.5f, 0.5f, false) + ); + query = queryThatShouldBePruned.doToQuery(context); + assertCorrectLuceneQuery("queryThatShouldBePruned", query, List.of("dog", "jumped", "on", 
"me")); + + WeightedTokensQueryBuilder onlyScorePrunedTokensQuery = new WeightedTokensQueryBuilder( + RANK_FEATURES_FIELD, + inputTokens, + new TokenPruningConfig(1.5f, 0.5f, true) + ); + query = onlyScorePrunedTokensQuery.doToQuery(context); + assertCorrectLuceneQuery("onlyScorePrunedTokensQuery", query, List.of("the", "black")); + } + } + } + + private void assertCorrectLuceneQuery(String name, Query query, List expectedFeatureFields) { + assertTrue(query instanceof BooleanQuery); + List booleanClauses = ((BooleanQuery) query).clauses(); + assertEquals( + name + " had " + booleanClauses.size() + " clauses, expected " + expectedFeatureFields.size(), + expectedFeatureFields.size(), + booleanClauses.size() + ); + for (int i = 0; i < booleanClauses.size(); i++) { + Query clauseQuery = booleanClauses.get(i).getQuery(); + assertTrue(name + " query " + query + " expected to be a BoostQuery", clauseQuery instanceof BoostQuery); + // FeatureQuery is not visible so we check the String representation + assertTrue(name + " query " + query + " expected to be a FeatureQuery", clauseQuery.toString().contains("FeatureQuery")); + assertTrue( + name + " query " + query + " expected to have field " + expectedFeatureFields.get(i), + clauseQuery.toString().contains("feature=" + expectedFeatureFields.get(i)) + ); + } + } + + private Document createDocument(List tokens, List weights) { + if (tokens.size() != weights.size()) { + throw new IllegalArgumentException( + "tokens and weights must have the same size. Got " + tokens.size() + " and " + weights.size() + "." + ); + } + Document document = new Document(); + for (int i = 0; i < tokens.size(); i++) { + document.add(new FeatureField(RANK_FEATURES_FIELD, tokens.get(i), weights.get(i))); + } + return document; + } + + /** + * Overridden to ensure that {@link SearchExecutionContext} has a non-null {@link IndexReader} + */ + @Override + public void testCacheability() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new FloatDocValuesField(RANK_FEATURES_FIELD, 1.0f)); + iw.addDocument(document); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + WeightedTokensQueryBuilder queryBuilder = createTestQueryBuilder(); + QueryBuilder rewriteQuery = rewriteQuery(queryBuilder, new SearchExecutionContext(context)); + + assertNotNull(rewriteQuery.toQuery(context)); + assertTrue("query should be cacheable: " + queryBuilder.toString(), context.isCacheable()); + } + } + } + + /** + * Overridden to ensure that {@link SearchExecutionContext} has a non-null {@link IndexReader} + */ + @Override + public void testMustRewrite() throws IOException { + try (Directory directory = newDirectory(); RandomIndexWriter iw = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + document.add(new FloatDocValuesField(RANK_FEATURES_FIELD, 1.0f)); + iw.addDocument(document); + try (IndexReader reader = iw.getReader()) { + SearchExecutionContext context = createSearchExecutionContext(newSearcher(reader)); + context.setAllowUnmappedFields(true); + WeightedTokensQueryBuilder queryBuilder = createTestQueryBuilder(); + queryBuilder.toQuery(context); + } + } + } + + @Override + protected void doAssertLuceneQuery(WeightedTokensQueryBuilder queryBuilder, Query query, SearchExecutionContext context) { + assertThat(query, instanceOf(BooleanQuery.class)); + 
BooleanQuery booleanQuery = (BooleanQuery) query; + assertEquals(booleanQuery.getMinimumNumberShouldMatch(), 1); + assertThat(booleanQuery.clauses(), hasSize(NUM_TOKENS)); + + Class featureQueryClass = FeatureField.newLinearQuery("", "", 0.5f).getClass(); + // if the weight is 1.0f a BoostQuery is returned + Class boostQueryClass = FeatureField.newLinearQuery("", "", 1.0f).getClass(); + + for (var clause : booleanQuery.clauses()) { + assertEquals(BooleanClause.Occur.SHOULD, clause.getOccur()); + assertThat(clause.getQuery(), either(instanceOf(featureQueryClass)).or(instanceOf(boostQueryClass))); + } + } + + public void testIllegalValues() { + List weightedTokens = List.of(new WeightedToken("foo", 1.0f)); + { + NullPointerException e = expectThrows( + NullPointerException.class, + () -> new WeightedTokensQueryBuilder(null, weightedTokens, null) + ); + assertEquals("[weighted_tokens] requires a fieldName", e.getMessage()); + } + { + NullPointerException e = expectThrows( + NullPointerException.class, + () -> new WeightedTokensQueryBuilder("field name", null, null) + ); + assertEquals("[weighted_tokens] requires tokens", e.getMessage()); + } + { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new WeightedTokensQueryBuilder("field name", List.of(), null) + ); + assertEquals("[weighted_tokens] requires at least one token", e.getMessage()); + } + { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new WeightedTokensQueryBuilder("field name", weightedTokens, new TokenPruningConfig(-1f, 0.0f, false)) + ); + assertEquals("[tokens_freq_ratio_threshold] must be between [1.0] and [100.0], got -1.0", e.getMessage()); + } + { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new WeightedTokensQueryBuilder("field name", weightedTokens, new TokenPruningConfig(101f, 0.0f, false)) + ); + assertEquals("[tokens_freq_ratio_threshold] must be between [1.0] and [100.0], got 101.0", e.getMessage()); + } + { + IllegalArgumentException e = expectThrows( + IllegalArgumentException.class, + () -> new WeightedTokensQueryBuilder("field name", weightedTokens, new TokenPruningConfig(5f, 5f, false)) + ); + assertEquals("[tokens_weight_threshold] must be between 0 and 1", e.getMessage()); + } + } + + public void testToXContent() throws Exception { + QueryBuilder query = new WeightedTokensQueryBuilder("foo", WEIGHTED_TOKENS, null); + checkGeneratedJson(""" + { + "weighted_tokens": { + "foo": { + "tokens": { + "foo": 0.42 + } + } + } + }""", query); + } + + public void testToXContentWithThresholds() throws Exception { + QueryBuilder query = new WeightedTokensQueryBuilder("foo", WEIGHTED_TOKENS, new TokenPruningConfig(4, 0.4f, false)); + checkGeneratedJson(""" + { + "weighted_tokens": { + "foo": { + "tokens": { + "foo": 0.42 + }, + "pruning_config": { + "tokens_freq_ratio_threshold": 4.0, + "tokens_weight_threshold": 0.4 + } + } + } + }""", query); + } + + public void testToXContentWithThresholdsAndOnlyScorePrunedTokens() throws Exception { + QueryBuilder query = new WeightedTokensQueryBuilder("foo", WEIGHTED_TOKENS, new TokenPruningConfig(4, 0.4f, true)); + checkGeneratedJson(""" + { + "weighted_tokens": { + "foo": { + "tokens": { + "foo": 0.42 + }, + "pruning_config": { + "tokens_freq_ratio_threshold": 4.0, + "tokens_weight_threshold": 0.4, + "only_score_pruned_tokens": true + } + } + } + }""", query); + } + + @Override + protected String[] shuffleProtectedFields() { + return new String[] { 
TOKENS_FIELD.getPreferredName() }; + } +} diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java index 9e2f14aaabd84..f8ffed0864372 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/utils/persistence/ResultsPersisterServiceTests.java @@ -33,6 +33,8 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.indices.IndexPrimaryShardNotAllocatedException; import org.elasticsearch.rest.RestStatus; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.ClientHelper; @@ -75,23 +77,28 @@ public class ResultsPersisterServiceTests extends ESTestCase { // Constants for searchWithRetry tests private static final SearchRequest SEARCH_REQUEST = new SearchRequest("my-index"); - private static final SearchResponse SEARCH_RESPONSE_SUCCESS = new SearchResponse( - null, + public static final SearchResponse SEARCH_RESPONSE_SUCCESS = SearchResponseUtils.emptyWithTotalHits( null, 1, 1, 0, - 0, + 1L, ShardSearchFailure.EMPTY_ARRAY, null ); - private static final SearchResponse SEARCH_RESPONSE_FAILURE = new SearchResponse( + public static final SearchResponse SEARCH_RESPONSE_FAILURE = new SearchResponse( + SearchHits.EMPTY_WITHOUT_TOTAL_HITS, + null, + null, + false, null, null, 1, + null, + 1, 0, 0, - 0, + 1L, ShardSearchFailure.EMPTY_ARRAY, null ); @@ -418,4 +425,5 @@ public static ResultsPersisterService buildResultsPersisterService(OriginSetting }).when(tp).schedule(any(Runnable.class), any(TimeValue.class), any(Executor.class)); return new ResultsPersisterService(tp, client, clusterService, Settings.EMPTY); } + } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java index a44aa9404f4f9..8575c7e1f4bf3 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/vectors/TextEmbeddingQueryVectorBuilderTests.java @@ -18,7 +18,7 @@ import org.elasticsearch.xpack.core.ml.action.InferModelAction; import org.elasticsearch.xpack.core.ml.inference.TrainedModelPrefixStrings; import org.elasticsearch.xpack.core.ml.inference.results.TextEmbeddingResults; -import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.MachineLearningTests; import java.io.IOException; import java.util.List; @@ -30,7 +30,7 @@ public class TextEmbeddingQueryVectorBuilderTests extends AbstractQueryVectorBui @Override protected List additionalPlugins() { - return List.of(new MachineLearning(Settings.EMPTY)); + return List.of(MachineLearningTests.createTrialLicensedMachineLearning(Settings.EMPTY)); } @Override diff --git a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java index 289f6896ed698..098023ad1841a 100644 --- 
a/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java +++ b/x-pack/plugin/profiling/src/internalClusterTest/java/org/elasticsearch/xpack/profiling/GetStackTracesActionIT.java @@ -30,8 +30,8 @@ public void testGetStackTracesUnfiltered() throws Exception { assertEquals(18, stackTrace.fileIds.size()); assertEquals(18, stackTrace.frameIds.size()); assertEquals(18, stackTrace.typeIds.size()); - assertEquals(0.0000098789d, stackTrace.annualCO2Tons, 0.0000000001d); - assertEquals(0.093075d, stackTrace.annualCostsUSD, 0.000001d); + assertEquals(0.0000048475146d, stackTrace.annualCO2Tons, 0.0000000001d); + assertEquals(0.18834d, stackTrace.annualCostsUSD, 0.00001d); assertNotNull(response.getStackFrames()); StackFrame stackFrame = response.getStackFrames().get("8NlMClggx8jaziUTJXlmWAAAAAAAAIYI"); diff --git a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson index cd3ddc1271d2d..a830ef8da66f1 100644 --- a/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson +++ b/x-pack/plugin/profiling/src/internalClusterTest/resources/data/profiling-hosts.ndjson @@ -1,2 +1,2 @@ {"create": {"_index": "profiling-hosts", "_id": "eLH27YsBj2lLi3tJYlvr"}} -{"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "profiling.host.machine": "x86_64", ",profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } +{"profiling.project.id": 100, "host.id": "8457605156473051743", "@timestamp": 
1700504426, "ecs.version": "1.12.0", "profiling.agent.build_timestamp": 1688111067, "profiling.instance.private_ipv4s": ["192.168.1.2"], "ec2.instance_life_cycle": "on-demand", "profiling.agent.config.map_scale_factor": 0, "ec2.instance_type": "i3.2xlarge", "profiling.host.ip": "192.168.1.2", "profiling.agent.config.bpf_log_level": 0, "profiling.host.sysctl.net.core.bpf_jit_enable": 1, "profiling.agent.config.file": "/etc/prodfiler/prodfiler.conf", "ec2.local_ipv4": "192.168.1.2", "profiling.agent.config.no_kernel_version_check": false, "profiling.host.machine": "x86_64", "profiling.host.tags": ["cloud_provider:aws", "cloud_environment:qa", "cloud_region:eu-west-1"], "profiling.agent.config.probabilistic_threshold": 100, "profiling.agent.config.disable_tls": false, "profiling.agent.config.tracers": "all", "profiling.agent.start_time": 1700090045589, "profiling.agent.config.max_elements_per_interval": 800, "ec2.placement.region": "eu-west-1", "profiling.agent.config.present_cpu_cores": 8, "profiling.host.kernel_version": "9.9.9-0-aws", "profiling.agent.config.bpf_log_size": 65536, "profiling.agent.config.known_traces_entries": 65536, "profiling.host.sysctl.kernel.unprivileged_bpf_disabled": 1, "profiling.agent.config.verbose": false, "profiling.agent.config.probabilistic_interval": "1m0s", "ec2.placement.availability_zone_id": "euw1-az1", "ec2.security_groups": "", "ec2.local_hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "ec2.placement.availability_zone": "eu-west-1c", "profiling.agent.config.upload_symbols": false, "profiling.host.sysctl.kernel.bpf_stats_enabled": 0, "profiling.host.name": "ip-192-168-1-2", "ec2.mac": "00:11:22:33:44:55", "profiling.host.kernel_proc_version": "Linux version 9.9.9-0-aws", "profiling.agent.config.cache_directory": "/var/cache/optimyze/", "profiling.agent.version": "v8.12.0", "ec2.hostname": "ip-192-168-1-2.eu-west-1.compute.internal", "profiling.agent.config.elastic_mode": false, "ec2.ami_id": "ami-aaaaaaaaaaa", "ec2.instance_id": "i-0b999999999999999" } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java index 0d92bf0a78d09..1e44cba4e62b2 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CO2Calculator.java @@ -18,7 +18,6 @@ final class CO2Calculator { private static final double DEFAULT_KILOWATTS_PER_CORE_ARM64 = 2.8d / 1000.0d; // unit: watt / core private static final double DEFAULT_KILOWATTS_PER_CORE = DEFAULT_KILOWATTS_PER_CORE_X86; // unit: watt / core private static final double DEFAULT_DATACENTER_PUE = 1.7d; - private final InstanceTypeService instanceTypeService; private final Map hostMetadata; private final double samplingDurationInSeconds; private final double customCO2PerKWH; @@ -27,7 +26,6 @@ final class CO2Calculator { private final double customKilowattsPerCoreARM64; CO2Calculator( - InstanceTypeService instanceTypeService, Map hostMetadata, double samplingDurationInSeconds, Double customCO2PerKWH, @@ -35,7 +33,6 @@ final class CO2Calculator { Double customPerCoreWattX86, Double customPerCoreWattARM64 ) { - this.instanceTypeService = instanceTypeService; this.hostMetadata = hostMetadata; this.samplingDurationInSeconds = samplingDurationInSeconds > 0 ? samplingDurationInSeconds : 1.0d; // avoid division by zero this.customCO2PerKWH = customCO2PerKWH == null ? 
DEFAULT_CO2_TONS_PER_KWH : customCO2PerKWH; @@ -54,7 +51,7 @@ public double getAnnualCO2Tons(String hostID, long samples) { return DEFAULT_KILOWATTS_PER_CORE * customCO2PerKWH * annualCoreHours * customDatacenterPUE; } - CostEntry costs = instanceTypeService.getCosts(host.instanceType); + CostEntry costs = InstanceTypeService.getCosts(host.instanceType); if (costs == null) { return getKiloWattsPerCore(host) * getCO2TonsPerKWH(host) * annualCoreHours * getDatacenterPUE(host); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java index 05319ba7d1cc4..ecaaee5d3bf4b 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/CostCalculator.java @@ -15,20 +15,17 @@ final class CostCalculator { private static final double SECONDS_PER_YEAR = SECONDS_PER_HOUR * 24 * 365.0d; // unit: seconds private static final double DEFAULT_COST_USD_PER_CORE_HOUR = 0.0425d; // unit: USD / (core * hour) private static final double DEFAULT_AWS_COST_FACTOR = 1.0d; - private final InstanceTypeService instanceTypeService; private final Map hostMetadata; private final double samplingDurationInSeconds; private final double awsCostFactor; private final double customCostPerCoreHour; CostCalculator( - InstanceTypeService instanceTypeService, Map hostMetadata, double samplingDurationInSeconds, Double awsCostFactor, Double customCostPerCoreHour ) { - this.instanceTypeService = instanceTypeService; this.hostMetadata = hostMetadata; this.samplingDurationInSeconds = samplingDurationInSeconds > 0 ? samplingDurationInSeconds : 1.0d; // avoid division by zero this.awsCostFactor = awsCostFactor == null ? DEFAULT_AWS_COST_FACTOR : awsCostFactor; @@ -45,7 +42,7 @@ public double annualCostsUSD(String hostID, double samples) { double providerCostFactor = host.instanceType.provider.equals("aws") ? 
awsCostFactor : 1.0d; - CostEntry costs = instanceTypeService.getCosts(host.instanceType); + CostEntry costs = InstanceTypeService.getCosts(host.instanceType); if (costs == null) { return annualCoreHours * customCostPerCoreHour * providerCostFactor; } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java index 79f8632238d4c..fc04f735fdf87 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphAction.java @@ -7,12 +7,13 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.Writeable; public final class GetFlamegraphAction extends ActionType { public static final GetFlamegraphAction INSTANCE = new GetFlamegraphAction(); public static final String NAME = "indices:data/read/profiling/flamegraph"; private GetFlamegraphAction() { - super(NAME, GetFlamegraphResponse::new); + super(NAME, Writeable.Reader.localOnly()); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java index 457faecf4ad54..468b74ed16000 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetFlamegraphResponse.java @@ -8,8 +8,8 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; @@ -43,30 +43,6 @@ public class GetFlamegraphResponse extends ActionResponse implements ChunkedToXC private final List annualCostsUSDInclusive; private final List annualCostsUSDExclusive; - public GetFlamegraphResponse(StreamInput in) throws IOException { - this.size = in.readInt(); - this.samplingRate = in.readDouble(); - this.edges = in.readCollectionAsList(i -> i.readMap(StreamInput::readInt)); - this.fileIds = in.readCollectionAsList(StreamInput::readString); - this.frameTypes = in.readCollectionAsList(StreamInput::readInt); - this.inlineFrames = in.readCollectionAsList(StreamInput::readBoolean); - this.fileNames = in.readCollectionAsList(StreamInput::readString); - this.addressOrLines = in.readCollectionAsList(StreamInput::readInt); - this.functionNames = in.readCollectionAsList(StreamInput::readString); - this.functionOffsets = in.readCollectionAsList(StreamInput::readInt); - this.sourceFileNames = in.readCollectionAsList(StreamInput::readString); - this.sourceLines = in.readCollectionAsList(StreamInput::readInt); - this.countInclusive = in.readCollectionAsList(StreamInput::readLong); - this.countExclusive = in.readCollectionAsList(StreamInput::readLong); - this.annualCO2TonsInclusive = in.readCollectionAsList(StreamInput::readDouble); - this.annualCO2TonsExclusive = in.readCollectionAsList(StreamInput::readDouble); - this.annualCostsUSDInclusive = 
in.readCollectionAsList(StreamInput::readDouble); - this.annualCostsUSDExclusive = in.readCollectionAsList(StreamInput::readDouble); - this.selfCPU = in.readLong(); - this.totalCPU = in.readLong(); - this.totalSamples = in.readLong(); - } - public GetFlamegraphResponse( int size, double samplingRate, @@ -115,27 +91,7 @@ public GetFlamegraphResponse( @Override public void writeTo(StreamOutput out) throws IOException { - out.writeInt(this.size); - out.writeDouble(this.samplingRate); - out.writeCollection(this.edges, (o, v) -> o.writeMap(v, StreamOutput::writeString, StreamOutput::writeInt)); - out.writeCollection(this.fileIds, StreamOutput::writeString); - out.writeCollection(this.frameTypes, StreamOutput::writeInt); - out.writeCollection(this.inlineFrames, StreamOutput::writeBoolean); - out.writeCollection(this.fileNames, StreamOutput::writeString); - out.writeCollection(this.addressOrLines, StreamOutput::writeInt); - out.writeCollection(this.functionNames, StreamOutput::writeString); - out.writeCollection(this.functionOffsets, StreamOutput::writeInt); - out.writeCollection(this.sourceFileNames, StreamOutput::writeString); - out.writeCollection(this.sourceLines, StreamOutput::writeInt); - out.writeCollection(this.countInclusive, StreamOutput::writeLong); - out.writeCollection(this.countExclusive, StreamOutput::writeLong); - out.writeCollection(this.annualCO2TonsInclusive, StreamOutput::writeDouble); - out.writeCollection(this.annualCO2TonsExclusive, StreamOutput::writeDouble); - out.writeCollection(this.annualCostsUSDInclusive, StreamOutput::writeDouble); - out.writeCollection(this.annualCostsUSDExclusive, StreamOutput::writeDouble); - out.writeLong(this.selfCPU); - out.writeLong(this.totalCPU); - out.writeLong(this.totalSamples); + TransportAction.localOnly(); } public int getSize() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java index 8df5b1ec9154e..84ab6643be781 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesAction.java @@ -7,12 +7,13 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionType; +import org.elasticsearch.common.io.stream.Writeable; public final class GetStackTracesAction extends ActionType { public static final GetStackTracesAction INSTANCE = new GetStackTracesAction(); public static final String NAME = "indices:data/read/profiling/stack_traces"; private GetStackTracesAction() { - super(NAME, GetStackTracesResponse::new); + super(NAME, Writeable.Reader.localOnly()); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java index f81b5f01caae3..efa8fc1d64244 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesRequest.java @@ -10,8 +10,8 @@ import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.ParsingException; -import 
org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.tasks.CancellableTask; @@ -93,35 +93,9 @@ public GetStackTracesRequest( this.customCostPerCoreHour = customCostPerCoreHour; } - public GetStackTracesRequest(StreamInput in) throws IOException { - this.query = in.readOptionalNamedWriteable(QueryBuilder.class); - this.sampleSize = in.readOptionalInt(); - this.requestedDuration = in.readOptionalDouble(); - this.awsCostFactor = in.readOptionalDouble(); - this.adjustSampleCount = in.readOptionalBoolean(); - this.indices = in.readOptionalString(); - this.stackTraceIds = in.readOptionalString(); - this.customCO2PerKWH = in.readOptionalDouble(); - this.customDatacenterPUE = in.readOptionalDouble(); - this.customPerCoreWattX86 = in.readOptionalDouble(); - this.customPerCoreWattARM64 = in.readOptionalDouble(); - this.customCostPerCoreHour = in.readOptionalDouble(); - } - @Override - public void writeTo(StreamOutput out) throws IOException { - out.writeOptionalNamedWriteable(query); - out.writeOptionalInt(sampleSize); - out.writeOptionalDouble(requestedDuration); - out.writeOptionalDouble(awsCostFactor); - out.writeOptionalBoolean(adjustSampleCount); - out.writeOptionalString(indices); - out.writeOptionalString(stackTraceIds); - out.writeOptionalDouble(customCO2PerKWH); - out.writeOptionalDouble(customDatacenterPUE); - out.writeOptionalDouble(customPerCoreWattX86); - out.writeOptionalDouble(customPerCoreWattARM64); - out.writeOptionalDouble(customCostPerCoreHour); + public void writeTo(StreamOutput out) { + TransportAction.localOnly(); } public Integer getSampleSize() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java index 2f1e15252c277..89c0b4ab6b0fb 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/GetStackTracesResponse.java @@ -7,16 +7,15 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.common.Strings; import org.elasticsearch.common.collect.Iterators; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.ChunkedToXContentHelper; import org.elasticsearch.common.xcontent.ChunkedToXContentObject; import org.elasticsearch.core.Nullable; import org.elasticsearch.xcontent.ToXContent; -import java.io.IOException; import java.util.Collections; import java.util.Iterator; import java.util.Map; @@ -36,37 +35,6 @@ public class GetStackTracesResponse extends ActionResponse implements ChunkedToX private final double samplingRate; private final long totalSamples; - public GetStackTracesResponse(StreamInput in) throws IOException { - this.stackTraces = in.readBoolean() - ? in.readMap( - i -> new StackTrace( - i.readCollectionAsList(StreamInput::readInt), - i.readCollectionAsList(StreamInput::readString), - i.readCollectionAsList(StreamInput::readString), - i.readCollectionAsList(StreamInput::readInt), - i.readDouble(), - i.readDouble(), - i.readLong() - ) - ) - : null; - this.stackFrames = in.readBoolean() - ? 
in.readMap( - i -> new StackFrame( - i.readCollectionAsList(StreamInput::readString), - i.readCollectionAsList(StreamInput::readString), - i.readCollectionAsList(StreamInput::readInt), - i.readCollectionAsList(StreamInput::readInt) - ) - ) - : null; - this.executables = in.readBoolean() ? in.readMap(StreamInput::readString) : null; - this.stackTraceEvents = in.readBoolean() ? in.readMap(i -> new TraceEvent(i.readString(), i.readLong())) : null; - this.totalFrames = in.readInt(); - this.samplingRate = in.readDouble(); - this.totalSamples = in.readLong(); - } - public GetStackTracesResponse( Map stackTraces, Map stackFrames, @@ -86,50 +54,8 @@ public GetStackTracesResponse( } @Override - public void writeTo(StreamOutput out) throws IOException { - if (stackTraces != null) { - out.writeBoolean(true); - out.writeMap(stackTraces, (o, v) -> { - o.writeCollection(v.addressOrLines, StreamOutput::writeInt); - o.writeStringCollection(v.fileIds); - o.writeStringCollection(v.frameIds); - o.writeCollection(v.typeIds, StreamOutput::writeInt); - o.writeDouble(v.annualCO2Tons); - o.writeDouble(v.annualCostsUSD); - o.writeLong(v.count); - }); - } else { - out.writeBoolean(false); - } - if (stackFrames != null) { - out.writeBoolean(true); - out.writeMap(stackFrames, (o, v) -> { - o.writeStringCollection(v.fileName); - o.writeStringCollection(v.functionName); - o.writeCollection(v.functionOffset, StreamOutput::writeInt); - o.writeCollection(v.lineNumber, StreamOutput::writeInt); - }); - } else { - out.writeBoolean(false); - } - if (executables != null) { - out.writeBoolean(true); - out.writeMap(executables, StreamOutput::writeString); - } else { - out.writeBoolean(false); - } - if (stackTraceEvents != null) { - out.writeBoolean(true); - out.writeMap(stackTraceEvents, (o, v) -> { - o.writeString(v.stacktraceID); - o.writeLong(v.count); - }); - } else { - out.writeBoolean(false); - } - out.writeInt(totalFrames); - out.writeDouble(samplingRate); - out.writeLong(totalSamples); + public void writeTo(StreamOutput out) { + TransportAction.localOnly(); } public Map getStackTraces() { diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java index 98e75ff264375..150b2639e9ac3 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceType.java @@ -23,19 +23,9 @@ final class InstanceType implements ToXContentObject { final String name; InstanceType(String provider, String region, String name) { - this.provider = provider; - this.region = region; - this.name = name; - } - - /** - * Creates a {@link InstanceType} from a {@link Map} of source data provided from JSON or profiling-costs. - * - * @param source the source data - * @return the {@link InstanceType} - */ - public static InstanceType fromCostSource(Map source) { - return new InstanceType((String) source.get("provider"), (String) source.get("region"), (String) source.get("instance_type")); + this.provider = provider != null ? provider : ""; + this.region = region != null ? region : ""; + this.name = name != null ? name : ""; } /** @@ -45,16 +35,45 @@ public static InstanceType fromCostSource(Map source) { * @return the {@link InstanceType} */ public static InstanceType fromHostSource(Map source) { + // Check and handle AWS. 
+ String region = (String) source.get("ec2.placement.region"); + if (region != null) { + String instanceType = (String) source.get("ec2.instance_type"); + return new InstanceType("aws", region, instanceType); + } + + // Check and handle GCP. + String zone = (String) source.get("gce.instance.zone"); + if (zone != null) { + // example: "gce.instance.zone": "projects/123456789/zones/europe-west1-b" + region = zone.substring(zone.lastIndexOf('/') + 1); + // region consists of the zone's first two tokens + String[] tokens = region.split("-", 3); + if (tokens.length > 2) { + region = tokens[0] + "-" + tokens[1]; + } + + // Support for instance type is planned for 8.13. + return new InstanceType("gcp", region, null); + } + + // Check and handle Azure. + region = (String) source.get("azure.compute.location"); + if (region != null) { + // example: "azure.compute.location": "eastus2" + // Support for instance type is planned for 8.13. + return new InstanceType("azure", region, null); + } + + // Support for configured tags (ECS). // Example of tags: // "profiling.host.tags": [ // "cloud_provider:aws", // "cloud_environment:qa", // "cloud_region:eu-west-1", // ], - String provider = ""; - String region = ""; - String instanceType = ""; - + String provider = null; + region = null; List tags = listOf(source.get("profiling.host.tags")); for (String tag : tags) { String[] kv = tag.toLowerCase(Locale.ROOT).split(":", 2); @@ -69,14 +88,7 @@ public static InstanceType fromHostSource(Map source) { } } - // We only support AWS for 8.12, but plan for GCP and Azure later. - // "gcp": check 'gce.instance.name' or 'gce.instance.name' to extract the instanceType - // "azure": extract the instanceType - if ("aws".equals(provider)) { - instanceType = (String) source.get("ec2.instance_type"); - } - - return new InstanceType(provider, region, instanceType); + return new InstanceType(provider, region, null); } @SuppressWarnings("unchecked") @@ -109,7 +121,7 @@ public boolean equals(Object o) { return false; } InstanceType that = (InstanceType) o; - return Objects.equals(provider, that.provider) && Objects.equals(region, that.region) && Objects.equals(name, that.name); + return provider.equals(that.provider) && region.equals(that.region) && name.equals(that.name); } @Override @@ -119,6 +131,6 @@ public int hashCode() { @Override public String toString() { - return "provider '" + provider + "' in region '" + region + "'"; + return "provider '" + provider + "' in region '" + region + "'"; } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java index 570a2c499fe35..58dd19c91f966 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/InstanceTypeService.java @@ -13,36 +13,51 @@ import org.elasticsearch.xcontent.XContentType; import java.io.IOException; -import java.io.UncheckedIOException; import java.util.HashMap; import java.util.List; import java.util.Map; +import java.util.function.Function; import java.util.zip.GZIPInputStream; -public class InstanceTypeService { - private final Map costsPerDatacenter = new HashMap<>(); - - public void load() { - try ( - GZIPInputStream in = new GZIPInputStream( - InstanceTypeService.class.getClassLoader().getResourceAsStream("profiling-costs.json.gz") - ) - ) { - XContentParser parser =
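
[Editor's note] To make the GCP zone handling above concrete: the code reduces the zone's last path segment to its first two dash-separated tokens. A worked example using the input from the code comment (an editor's walkthrough mirroring the lines just shown):

    public class ZoneToRegionExample {
        public static void main(String[] args) {
            String zone = "projects/123456789/zones/europe-west1-b";
            String region = zone.substring(zone.lastIndexOf('/') + 1); // "europe-west1-b"
            String[] tokens = region.split("-", 3);                    // ["europe", "west1", "b"]
            if (tokens.length > 2) {
                region = tokens[0] + "-" + tokens[1];                  // "europe-west1"
            }
            System.out.println(region);                                // prints "europe-west1"
        }
    }
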
XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, in); - if (parser.currentToken() == null) { - parser.nextToken(); - } - List> rawData = XContentParserUtils.parseList(parser, XContentParser::map); - for (Map entry : rawData) { - costsPerDatacenter.put(InstanceType.fromCostSource(entry), CostEntry.fromSource(entry)); - } +public final class InstanceTypeService { + + private InstanceTypeService() {} - } catch (IOException e) { - throw new UncheckedIOException(e); + private static final class Holder { + private static final Map costsPerDatacenter; + + static { + final Map objects = new HashMap<>(); + final Function dedupString = s -> (String) objects.computeIfAbsent(s, Function.identity()); + final Map tmp = new HashMap<>(); + try ( + GZIPInputStream in = new GZIPInputStream( + InstanceTypeService.class.getClassLoader().getResourceAsStream("profiling-costs.json.gz") + ); + XContentParser parser = XContentType.JSON.xContent().createParser(XContentParserConfiguration.EMPTY, in) + ) { + if (parser.currentToken() == null) { + parser.nextToken(); + } + List> rawData = XContentParserUtils.parseList(parser, XContentParser::map); + for (Map entry : rawData) { + tmp.put( + new InstanceType( + dedupString.apply((String) entry.get("provider")), + dedupString.apply((String) entry.get("region")), + dedupString.apply((String) entry.get("instance_type")) + ), + (CostEntry) objects.computeIfAbsent(CostEntry.fromSource(entry), Function.identity()) + ); + } + costsPerDatacenter = Map.copyOf(tmp); + } catch (IOException e) { + throw new ExceptionInInitializerError(e); + } } } - public CostEntry getCosts(InstanceType instance) { - return costsPerDatacenter.get(instance); + public static CostEntry getCosts(InstanceType instance) { + return Holder.costsPerDatacenter.get(instance); } } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java index 0068d03767387..ce15982450a66 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingIndexTemplateRegistry.java @@ -174,11 +174,8 @@ protected List getLifecyclePolicies() { indexVersion("symbols", PROFILING_SYMBOLS_VERSION) ) )) { - try { - componentTemplates.put( - config.getTemplateName(), - ComponentTemplate.parse(JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) - ); + try (var parser = JsonXContent.jsonXContent.createParser(XContentParserConfiguration.EMPTY, config.loadBytes())) { + componentTemplates.put(config.getTemplateName(), ComponentTemplate.parse(parser)); } catch (IOException e) { throw new AssertionError(e); } diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java index a2459f839523b..b105cde3d5c2a 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/ProfilingPlugin.java @@ -86,24 +86,18 @@ public Collection createComponents(PluginServices services) { // set initial value updateTemplatesEnabled(PROFILING_TEMPLATES_ENABLED.get(settings)); 
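
[Editor's note] The rewritten InstanceTypeService above uses the initialization-on-demand holder idiom: the nested Holder class is only loaded, and its static initializer only run, on the first call to getCosts, and the JVM guarantees that this happens exactly once and is thread-safe. A generic sketch of the idiom with illustrative names (not code from this changeset):

    import java.util.Map;

    final class LazyCostTable {
        private LazyCostTable() {}

        private static final class Holder {
            // Runs once, when Holder is first touched; class loading supplies
            // the synchronization, so no explicit locking is needed.
            static final Map<String, Double> COSTS = load();
        }

        private static Map<String, Double> load() {
            // Stand-in for the gzip/JSON parsing done by the real service.
            return Map.of("aws/eu-west-1/i3.2xlarge", 0.061d);
        }

        static Double lookup(String key) {
            return Holder.COSTS.get(key); // first call triggers initialization
        }
    }
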
clusterService.getClusterSettings().addSettingsUpdateConsumer(PROFILING_TEMPLATES_ENABLED, this::updateTemplatesEnabled); - InstanceTypeService instanceTypeService = createInstanceTypeService(); if (enabled) { registry.get().initialize(); indexManager.get().initialize(); dataStreamManager.get().initialize(); - instanceTypeService.load(); } - return List.of(createLicenseChecker(), instanceTypeService); + return List.of(createLicenseChecker()); } protected ProfilingLicenseChecker createLicenseChecker() { return new ProfilingLicenseChecker(XPackPlugin::getSharedLicenseState); } - protected InstanceTypeService createInstanceTypeService() { - return new InstanceTypeService(); - } - public void updateCheckOutdatedIndices(boolean newValue) { if (newValue == false) { logger.info("profiling will ignore outdated indices"); diff --git a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java index 3cd9ded3005a2..dd78d6f1815f5 100644 --- a/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java +++ b/x-pack/plugin/profiling/src/main/java/org/elasticsearch/xpack/profiling/TransportGetFlamegraphAction.java @@ -11,12 +11,11 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.action.support.TransportAction; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.inject.Inject; -import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; @@ -27,7 +26,7 @@ import java.util.SortedMap; import java.util.TreeMap; -public class TransportGetFlamegraphAction extends HandledTransportAction { +public class TransportGetFlamegraphAction extends TransportAction { private static final Logger log = LogManager.getLogger(TransportGetFlamegraphAction.class); private static final StackFrame EMPTY_STACKFRAME = new StackFrame("", "", 0, 0); @@ -36,7 +35,7 @@ public class TransportGetFlamegraphAction extends HandledTransportAction { +public class TransportGetStackTracesAction extends TransportAction { private static final Logger log = LogManager.getLogger(TransportGetStackTracesAction.class); public static final Setting PROFILING_MAX_STACKTRACE_QUERY_SLICES = Setting.intSetting( @@ -111,7 +110,6 @@ public class TransportGetStackTracesAction extends HandledTransportAction hostsTable = Map.ofEntries( Map.entry(HOST_ID_A, @@ -40,7 +37,7 @@ public void testCreateFromRegularSource() { new InstanceType( "gcp", "europe-west1", - "" // Doesn't matter for unknown datacenters. + null // Doesn't matter for unknown datacenters. ), "x86_64" ) @@ -51,7 +48,7 @@ public void testCreateFromRegularSource() { new InstanceType( "azure", "northcentralus", - "" // Doesn't matter for unknown datacenters. + null // Doesn't matter for unknown datacenters. ), "aarch64" ) @@ -62,7 +59,7 @@ public void testCreateFromRegularSource() { new InstanceType( "on-prem-provider", "on-prem-region", - "" // Doesn't matter for unknown datacenters. + null // Doesn't matter for unknown datacenters. 
), "aarch64" ) @@ -73,7 +70,7 @@ public void testCreateFromRegularSource() { double samplingDurationInSeconds = 1_800.0d; // 30 minutes long samples = 100_000L; // 100k samples double annualCoreHours = CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d); - CO2Calculator co2Calculator = new CO2Calculator(instanceTypeService, hostsTable, samplingDurationInSeconds, null, null, null, null); + CO2Calculator co2Calculator = new CO2Calculator(hostsTable, samplingDurationInSeconds, null, null, null, null); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_A, samples), annualCoreHours, 0.000002213477d); checkCO2Calculation(co2Calculator.getAnnualCO2Tons(HOST_ID_B, samples), annualCoreHours, 1.1d, 0.00004452d, 7.0d); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java new file mode 100644 index 0000000000000..2982df317a38c --- /dev/null +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CarthesianCombinator.java @@ -0,0 +1,60 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.profiling; + +import java.lang.reflect.Array; +import java.util.function.Consumer; + +public class CarthesianCombinator { + private final T[] elems; + private final int[] index; + private final T[] result; + private final int len; + + @SuppressWarnings("unchecked") + CarthesianCombinator(T[] elems, int len) { + if (elems.length == 0) { + throw new IllegalArgumentException("elems must not be empty"); + } + this.elems = elems; + this.index = new int[len]; + this.result = (T[]) Array.newInstance(elems[0].getClass(), len); + this.len = len; + } + + private void init(int length) { + for (int i = 0; i < length; i++) { + index[i] = 0; + result[i] = elems[0]; + } + } + + public void forEach(Consumer action) { + // Initialize index and result + init(len); + + int pos = 0; + while (pos < len) { + if (index[pos] < elems.length) { + result[pos] = elems[index[pos]]; + action.accept(result); + index[pos]++; + continue; + } + while (pos < len && index[pos] + 1 >= elems.length) { + pos++; + } + if (pos < len) { + index[pos]++; + result[pos] = elems[index[pos]]; + init(pos); + pos = 0; + } + } + } +} diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java index f42ad1188693b..030616d285416 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/CostCalculatorTests.java @@ -16,9 +16,6 @@ public class CostCalculatorTests extends ESTestCase { private static final String HOST_ID_B = "2220256254710195392"; public void testCreateFromRegularSource() { - InstanceTypeService instanceTypeService = new InstanceTypeService(); - instanceTypeService.load(); - // tag::noformat Map hostsTable = Map.ofEntries( Map.entry(HOST_ID_A, @@ -49,7 +46,7 @@ public void testCreateFromRegularSource() { double samplingDurationInSeconds = 1_800.0d; // 30 minutes long samples = 100_000L; // 100k samples double annualCoreHours = 
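Since `CarthesianCombinator` above is new test infrastructure, a short usage sketch may help: for an alphabet of k elements and tuple length n, it invokes the callback with every one of the k^n combinations, reusing a single result array. A hypothetical standalone demo (placed in the same package, since the combinator's constructor is package-private), mirroring what the GCP zone fuzzer further down does with it:

```java
package org.elasticsearch.xpack.profiling;

public class CarthesianCombinatorDemo {
    public static void main(String[] args) {
        Character[] alphabet = { '/', '-', 'a' };
        // visits all 3^2 = 9 two-character combinations: "//", "-/", "a/", "/-", "--", "a-", ...
        CarthesianCombinator<Character> combinator = new CarthesianCombinator<>(alphabet, 2);
        combinator.forEach(tuple -> {
            StringBuilder sb = new StringBuilder();
            for (Character c : tuple) {
                sb.append(c);
            }
            System.out.println(sb); // each candidate zone string for the fuzzer
        });
    }
}
```

Note that the callback must not hold on to the array it receives, since the combinator mutates it in place between invocations.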
CostCalculator.annualCoreHours(samplingDurationInSeconds, samples, 20.0d); - CostCalculator costCalculator = new CostCalculator(instanceTypeService, hostsTable, samplingDurationInSeconds, null, null); + CostCalculator costCalculator = new CostCalculator(hostsTable, samplingDurationInSeconds, null, null); // Checks whether the cost calculation is based on the pre-calculated lookup data. checkCostCalculation(costCalculator.annualCostsUSD(HOST_ID_A, samples), annualCoreHours, 0.061d); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java index 8bf4598cf75f7..f0f328e48d00b 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesRequestTests.java @@ -8,12 +8,8 @@ package org.elasticsearch.xpack.profiling; import org.elasticsearch.common.ParsingException; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.query.BoolQueryBuilder; -import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.ESTestCase; @@ -28,47 +24,6 @@ import static java.util.Collections.emptyList; public class GetStackTracesRequestTests extends ESTestCase { - public void testSerialization() throws IOException { - Integer sampleSize = randomIntBetween(1, Integer.MAX_VALUE); - Double requestedDuration = randomBoolean() ? randomDoubleBetween(0.001d, Double.MAX_VALUE, true) : null; - Double awsCostFactor = randomBoolean() ? randomDoubleBetween(0.1d, 5.0d, true) : null; - Double customCO2PerKWH = randomBoolean() ? randomDoubleBetween(0.000001d, 0.001d, true) : null; - Double datacenterPUE = randomBoolean() ? randomDoubleBetween(1.0d, 3.0d, true) : null; - Double perCoreWattX86 = randomBoolean() ? randomDoubleBetween(0.01d, 20.0d, true) : null; - Double perCoreWattARM64 = randomBoolean() ? randomDoubleBetween(0.01d, 20.0d, true) : null; - Double customCostPerCoreHour = randomBoolean() ? randomDoubleBetween(0.001d, 1000.0d, true) : null; - QueryBuilder query = randomBoolean() ? 
new BoolQueryBuilder() : null; - - GetStackTracesRequest request = new GetStackTracesRequest( - sampleSize, - requestedDuration, - awsCostFactor, - query, - null, - null, - customCO2PerKWH, - datacenterPUE, - perCoreWattX86, - perCoreWattARM64, - customCostPerCoreHour - ); - try (BytesStreamOutput out = new BytesStreamOutput()) { - request.writeTo(out); - try (NamedWriteableAwareStreamInput in = new NamedWriteableAwareStreamInput(out.bytes().streamInput(), writableRegistry())) { - GetStackTracesRequest deserialized = new GetStackTracesRequest(in); - assertEquals(sampleSize, deserialized.getSampleSize()); - assertEquals(requestedDuration, deserialized.getRequestedDuration()); - assertEquals(awsCostFactor, deserialized.getAwsCostFactor()); - assertEquals(customCO2PerKWH, deserialized.getCustomCO2PerKWH()); - assertEquals(datacenterPUE, deserialized.getCustomDatacenterPUE()); - assertEquals(perCoreWattX86, deserialized.getCustomPerCoreWattX86()); - assertEquals(perCoreWattARM64, deserialized.getCustomPerCoreWattARM64()); - assertEquals(customCostPerCoreHour, deserialized.getCustomCostPerCoreHour()); - assertEquals(query, deserialized.getQuery()); - } - } - } - public void testParseValidXContent() throws IOException { try (XContentParser content = createParser(XContentFactory.jsonBuilder() //tag::noformat @@ -93,6 +48,15 @@ public void testParseValidXContent() throws IOException { assertEquals(Double.valueOf(100.54d), request.getRequestedDuration()); // a basic check suffices here assertEquals("@timestamp", ((RangeQueryBuilder) request.getQuery()).fieldName()); + // Expect the default values + assertNull(request.getIndices()); + assertNull(request.getStackTraceIds()); + assertNull(request.getAwsCostFactor()); + assertNull(request.getCustomCO2PerKWH()); + assertNull(request.getCustomDatacenterPUE()); + assertNull(request.getCustomCostPerCoreHour()); + assertNull(request.getCustomPerCoreWattX86()); + assertNull(request.getCustomPerCoreWattARM64()); } } @@ -124,7 +88,57 @@ public void testParseValidXContentWithCustomIndex() throws IOException { assertEquals("@timestamp", ((RangeQueryBuilder) request.getQuery()).fieldName()); // Expect the default values - assertEquals(null, request.getRequestedDuration()); + assertNull(request.getRequestedDuration()); + assertNull(request.getAwsCostFactor()); + assertNull(request.getCustomCO2PerKWH()); + assertNull(request.getCustomDatacenterPUE()); + assertNull(request.getCustomCostPerCoreHour()); + assertNull(request.getCustomPerCoreWattX86()); + assertNull(request.getCustomPerCoreWattARM64()); + } + } + + public void testParseValidXContentWithCustomCostAndCO2Data() throws IOException { + try (XContentParser content = createParser(XContentFactory.jsonBuilder() + //tag::noformat + .startObject() + .field("sample_size", 2000) + .field("requested_duration", 100.54d) + .field("aws_cost_factor", 7.3d) + .field("co2_per_kwh", 22.4d) + .field("datacenter_pue", 1.05d) + .field("cost_per_core_hour", 3.32d) + .field("per_core_watt_x86", 7.2d) + .field("per_core_watt_arm64", 2.82d) + .startObject("query") + .startObject("range") + .startObject("@timestamp") + .field("gte", "2022-10-05") + .endObject() + .endObject() + .endObject() + .endObject() + //end::noformat + )) { + + GetStackTracesRequest request = new GetStackTracesRequest(); + request.parseXContent(content); + + assertEquals(Integer.valueOf(2000), request.getSampleSize()); + assertEquals(Double.valueOf(100.54d), request.getRequestedDuration()); + assertEquals(Double.valueOf(7.3d), request.getAwsCostFactor()); + 
assertEquals(Double.valueOf(22.4d), request.getCustomCO2PerKWH()); + assertEquals(Double.valueOf(1.05d), request.getCustomDatacenterPUE()); + assertEquals(Double.valueOf(3.32d), request.getCustomCostPerCoreHour()); + assertEquals(Double.valueOf(7.2d), request.getCustomPerCoreWattX86()); + assertEquals(Double.valueOf(2.82d), request.getCustomPerCoreWattARM64()); + + // a basic check suffices here + assertEquals("@timestamp", ((RangeQueryBuilder) request.getQuery()).fieldName()); + + // Expect the default values + assertNull(request.getIndices()); + assertNull(request.getStackTraceIds()); } } @@ -246,7 +260,6 @@ public void testConsidersCustomIndicesInRelatedIndices() { } public void testConsidersDefaultIndicesInRelatedIndices() { - String customIndex = randomAlphaOfLength(5); GetStackTracesRequest request = new GetStackTracesRequest(1, 1.0d, 1.0d, null, null, null, null, null, null, null, null); String[] indices = request.indices(); assertEquals(15, indices.length); diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java index 7455c2b30e13d..99a34719f96c9 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/GetStackTracesResponseTests.java @@ -7,20 +7,18 @@ package org.elasticsearch.xpack.profiling; -import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.test.AbstractChunkedSerializingTestCase; -import org.elasticsearch.test.AbstractWireSerializingTestCase; +import org.elasticsearch.test.ESTestCase; import java.util.List; import java.util.Map; -public class GetStackTracesResponseTests extends AbstractWireSerializingTestCase { +public class GetStackTracesResponseTests extends ESTestCase { private T randomNullable(T v) { return randomBoolean() ? 
v : null; } - @Override - protected GetStackTracesResponse createTestInstance() { + private GetStackTracesResponse createTestInstance() { int totalFrames = randomIntBetween(1, 100); Map stackTraces = randomNullable( @@ -57,16 +55,6 @@ protected GetStackTracesResponse createTestInstance() { return new GetStackTracesResponse(stackTraces, stackFrames, executables, stackTraceEvents, totalFrames, 1.0, totalSamples); } - @Override - protected GetStackTracesResponse mutateInstance(GetStackTracesResponse instance) { - return null;// TODO implement https://github.com/elastic/elasticsearch/issues/25929 - } - - @Override - protected Writeable.Reader instanceReader() { - return GetStackTracesResponse::new; - } - public void testChunking() { AbstractChunkedSerializingTestCase.assertChunkCount(createTestInstance(), instance -> { // start, end, total_frames, samplingrate diff --git a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java index 0359357004687..d8f93cd129916 100644 --- a/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java +++ b/x-pack/plugin/profiling/src/test/java/org/elasticsearch/xpack/profiling/HostMetadataTests.java @@ -13,7 +13,7 @@ import java.util.Map; public class HostMetadataTests extends ESTestCase { - public void testCreateFromRegularSource() { + public void testCreateFromSourceAWS() { final String hostID = "1440256254710195396"; final String machine = "x86_64"; final String provider = "aws"; @@ -25,9 +25,8 @@ public void testCreateFromRegularSource() { Map.of( "host.id", hostID, "profiling.host.machine", machine, - "profiling.host.tags", Arrays.asList( - "cloud_provider:"+provider, "cloud_environment:qa", "cloud_region:"+region), - "ec2.instance_type", instanceType + "ec2.instance_type", instanceType, + "ec2.placement.region", region ) ); // end::noformat @@ -38,4 +37,141 @@ public void testCreateFromRegularSource() { assertEquals(region, host.instanceType.region); assertEquals(instanceType, host.instanceType.name); } + + public void testCreateFromSourceGCP() { + final String hostID = "1440256254710195396"; + final String machine = "x86_64"; + final String provider = "gcp"; + final String[] regions = { "", "", "europe-west1", "europewest", "europe-west1" }; + final String[] zones = { + "", + "/", + "projects/123456789/zones/" + regions[2] + "-b", + "projects/123456789/zones/" + regions[3], + "projects/123456789/zones/" + regions[4] + "-b-c" }; + + for (int i = 0; i < regions.length; i++) { + String region = regions[i]; + String zone = zones[i]; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "profiling.host.machine", machine, + "gce.instance.zone", zone + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(machine, host.profilingHostMachine); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals("", host.instanceType.name); + } + } + + public void testCreateFromSourceGCPZoneFuzzer() { + final String hostID = "1440256254710195396"; + final String machine = "x86_64"; + final String provider = "gcp"; + final Character[] chars = new Character[] { '/', '-', 'a' }; + + for (int zoneLength = 1; zoneLength <= 5; zoneLength++) { + CarthesianCombinator combinator = new CarthesianCombinator<>(chars, zoneLength); + + combinator.forEach((result) -> { + StringBuilder sb = 
new StringBuilder(); + for (Character c : result) { + sb.append(c); + } + String zone = sb.toString(); + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "profiling.host.machine", machine, + "gce.instance.zone", zone + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(machine, host.profilingHostMachine); + assertEquals(provider, host.instanceType.provider); + assertNotNull(host.instanceType.region); + assertEquals("", host.instanceType.name); + // region isn't tested because of the combinatorial nature of this test + }); + } + } + + public void testCreateFromSourceAzure() { + final String hostID = "1440256254710195396"; + final String machine = "x86_64"; + final String provider = "azure"; + final String region = "eastus2"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "profiling.host.machine", machine, + "azure.compute.location", region + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(machine, host.profilingHostMachine); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals("", host.instanceType.name); + } + + public void testCreateFromSourceECS() { + final String hostID = "1440256254710195396"; + final String machine = "x86_64"; + final String provider = "any-provider"; + final String region = "any-region"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "profiling.host.machine", machine, + "profiling.host.tags", Arrays.asList( + "cloud_provider:"+provider, "cloud_environment:qa", "cloud_region:"+region) + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(machine, host.profilingHostMachine); + assertEquals(provider, host.instanceType.provider); + assertEquals(region, host.instanceType.region); + assertEquals("", host.instanceType.name); + } + + public void testCreateFromSourceNoProvider() { + final String hostID = "1440256254710195396"; + final String machine = "x86_64"; + + // tag::noformat + HostMetadata host = HostMetadata.fromSource( + Map.of( + "host.id", hostID, + "profiling.host.machine", machine + ) + ); + // end::noformat + + assertEquals(hostID, host.hostID); + assertEquals(machine, host.profilingHostMachine); + assertEquals("", host.instanceType.provider); + assertEquals("", host.instanceType.region); + assertEquals("", host.instanceType.name); + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java index 29705d9e4b116..d10ed1775b024 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/async/AsyncTaskManagementService.java @@ -215,6 +215,7 @@ private ActionListener wrapStoringListener( acquiredListener.onResponse(operation.initialResponse(searchTask)); } }, waitForCompletionTimeout, threadPool.executor(ThreadPool.Names.SEARCH)); + // This will be performed at the end of normal execution return ActionListener.wrap(response -> { ActionListener acquiredListener = exclusiveListener.getAndSet(null); @@ -234,7 +235,11 @@ private ActionListener wrapStoringListener( } } else { // We finished after timeout - saving results - storeResults(searchTask, new StoredAsyncResponse<>(response, 
threadPool.absoluteTimeInMillis() + keepAlive.getMillis())); + storeResults( + searchTask, + new StoredAsyncResponse<>(response, threadPool.absoluteTimeInMillis() + keepAlive.getMillis()), + ActionListener.running(response::decRef) + ); } }, e -> { ActionListener<Response> acquiredListener = exclusiveListener.getAndSet(null); @@ -272,6 +277,7 @@ private void storeResults(T searchTask, StoredAsyncResponse<Response> storedResp ActionListener.wrap( // We should only unregister after the result is saved resp -> { + // TODO: generalize the logging, not just eql logger.trace(() -> "stored eql search results for [" + searchTask.getExecutionId().getEncoded() + "]"); taskManager.unregister(searchTask); if (storedResponse.getException() != null) { @@ -290,6 +296,7 @@ private void storeResults(T searchTask, StoredAsyncResponse<Response> storedResp if (cause instanceof DocumentMissingException == false && cause instanceof VersionConflictEngineException == false) { logger.error( + // TODO: generalize the logging, not just eql () -> format("failed to store eql search results for [%s]", searchTask.getExecutionId().getEncoded()), exc ); diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java index 5d9736726b46f..f084b5cda4abe 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRules.java @@ -8,6 +8,8 @@ import org.elasticsearch.common.util.set.Sets; import org.elasticsearch.xpack.ql.expression.Alias; +import org.elasticsearch.xpack.ql.expression.Attribute; +import org.elasticsearch.xpack.ql.expression.AttributeMap; import org.elasticsearch.xpack.ql.expression.Expression; import org.elasticsearch.xpack.ql.expression.Expressions; import org.elasticsearch.xpack.ql.expression.Literal; @@ -69,6 +71,7 @@ import static java.lang.Math.signum; import static java.util.Arrays.asList; +import static java.util.Collections.emptySet; import static org.elasticsearch.xpack.ql.expression.Literal.FALSE; import static org.elasticsearch.xpack.ql.expression.Literal.TRUE; import static org.elasticsearch.xpack.ql.expression.predicate.Predicates.combineAnd; @@ -1785,6 +1788,93 @@ protected Expression nonNullify(Expression exp, Expression nonNullExp) { } } + + /** + * Simplify IsNotNull targets by resolving the underlying expression to its root fields with unknown + * nullability. + * e.g. + * (x + 1) / 2 IS NOT NULL --> x IS NOT NULL AND (x+1) / 2 IS NOT NULL + * SUBSTRING(x, 3) > 4 IS NOT NULL --> x IS NOT NULL AND SUBSTRING(x, 3) > 4 IS NOT NULL + * When dealing with multiple fields, a conjunction/disjunction is built based on the predicate: + * (x + y) / 4 IS NOT NULL --> x IS NOT NULL AND y IS NOT NULL AND (x + y) / 4 IS NOT NULL + * This handles the case of fields nested inside functions or expressions in order to avoid: + * - having to evaluate the whole expression + * - not pushing down the filter due to expression evaluation + * IS NULL cannot be simplified since it leads to a disjunction which prevents the filter from being + * pushed down: + * (x + 1) IS NULL --> x IS NULL OR x + 1 IS NULL + * and x IS NULL cannot be pushed down + *
+ * Implementation-wise this rule goes bottom-up, keeping the alias map up to date with the current plan, + * and then tries to replace the IsNotNull targets. + */ + public static class InferIsNotNull extends Rule<LogicalPlan, LogicalPlan> { + + @Override + public LogicalPlan apply(LogicalPlan plan) { + // the alias map is shared across the whole plan + AttributeMap<Expression> aliases = new AttributeMap<>(); + // traverse bottom-up to pick up the aliases as we go + plan = plan.transformUp(p -> inspectPlan(p, aliases)); + return plan; + } + + private LogicalPlan inspectPlan(LogicalPlan plan, AttributeMap<Expression> aliases) { + // inspect just this plan's properties + plan.forEachExpression(Alias.class, a -> aliases.put(a.toAttribute(), a.child())); + // now go about finding isNull/isNotNull + LogicalPlan newPlan = plan.transformExpressionsOnlyUp(IsNotNull.class, inn -> inferNotNullable(inn, aliases)); + return newPlan; + } + + private Expression inferNotNullable(IsNotNull inn, AttributeMap<Expression> aliases) { + Expression result = inn; + Set<Expression> refs = resolveExpressionAsRootAttributes(inn.field(), aliases); + // no refs found or could not detect - return the original function + if (refs.size() > 0) { + // add an IsNotNull filter for each resolved root field, along with the initial inn + var innList = CollectionUtils.combine(refs.stream().map(r -> (Expression) new IsNotNull(inn.source(), r)).toList(), inn); + result = Predicates.combineAnd(innList); + } + return result; + } + + /** + * Unroll the expression to its references to get to the root fields + * that really matter for filtering. + */ + protected Set<Expression> resolveExpressionAsRootAttributes(Expression exp, AttributeMap<Expression> aliases) { + Set<Expression> resolvedExpressions = new LinkedHashSet<>(); + boolean changed = doResolve(exp, aliases, resolvedExpressions); + return changed ? resolvedExpressions : emptySet(); + } + + private boolean doResolve(Expression exp, AttributeMap<Expression> aliases, Set<Expression> resolvedExpressions) { + boolean changed = false; + // check if the expression can be skipped or is not nullable + if (skipExpression(exp) || exp.nullable() == Nullability.FALSE) { + resolvedExpressions.add(exp); + } else { + for (Expression e : exp.references()) { + Expression resolved = aliases.resolve(e, e); + // found a root attribute, bail out + if (resolved instanceof Attribute a && resolved == e) { + resolvedExpressions.add(a); + // don't mark things as changed if the original expression hasn't been broken down + changed |= resolved != exp; + } else { + // go further + changed |= doResolve(resolved, aliases, resolvedExpressions); + } + } + } + return changed; + } + + protected boolean skipExpression(Expression e) { + return false; + } + } + public static final class SetAsOptimized extends Rule<LogicalPlan, LogicalPlan> { @Override diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java index cb13cfd651ed3..2ccdd66089c79 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/rule/RuleExecutor.java @@ -67,6 +67,10 @@ public Batch(String name, Rule...
rules) { public String name() { return name; } + + public Rule[] rules() { + return rules; + } } private Iterable> batches = null; diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java index b46f94f958433..d2710a980a6ee 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/NumericUtils.java @@ -145,4 +145,25 @@ public static double asFiniteNumber(double dbl) { } return dbl; } + + /** + * Converts a number to an integer, saturating that integer if the number doesn't fit naturally. That is to say, values + * greater than Integer.MAX_VALUE yield Integer.MAX_VALUE and values less than Integer.MIN_VALUE yield Integer.MIN_VALUE. + * + * This function exists because Long::intValue() yields -1 and 0 for Long.MAX_VALUE and Long.MIN_VALUE, respectively. + * + * @param n the number to convert + * @return a valid integer + */ + public static int saturatingIntValue(Number n) { + if (n instanceof Long ln) { + if (ln > Integer.MAX_VALUE) { + return Integer.MAX_VALUE; + } + if (ln < Integer.MIN_VALUE) { + return Integer.MIN_VALUE; + } + } + return n.intValue(); + } } diff --git a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java index 57e472cd5bb17..4be9ddd1f3d21 100644 --- a/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java +++ b/x-pack/plugin/ql/src/main/java/org/elasticsearch/xpack/ql/util/SpatialCoordinateTypes.java @@ -111,7 +111,7 @@ public SpatialPoint stringAsPoint(String string) { throw new IllegalArgumentException("Unsupported geometry type " + geometry.type()); } } catch (Exception e) { - throw new RuntimeException("Failed to parse WKT: " + e.getMessage(), e); + throw new IllegalArgumentException("Failed to parse WKT: " + e.getMessage(), e); } } diff --git a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java index b14e46a96a9e6..1cab7dd87195b 100644 --- a/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java +++ b/x-pack/plugin/ql/src/test/java/org/elasticsearch/xpack/ql/optimizer/OptimizerRulesTests.java @@ -14,6 +14,7 @@ import org.elasticsearch.xpack.ql.expression.Literal; import org.elasticsearch.xpack.ql.expression.Nullability; import org.elasticsearch.xpack.ql.expression.function.aggregate.Count; +import org.elasticsearch.xpack.ql.expression.function.scalar.string.StartsWith; import org.elasticsearch.xpack.ql.expression.predicate.BinaryOperator; import org.elasticsearch.xpack.ql.expression.predicate.Predicates; import org.elasticsearch.xpack.ql.expression.predicate.Range; @@ -1768,7 +1769,90 @@ public void testPushDownFilterThroughAgg() throws Exception { // expected Filter expected = new Filter(EMPTY, new Aggregate(EMPTY, combinedFilter, emptyList(), emptyList()), aggregateCondition); assertEquals(expected, new PushDownAndCombineFilters().apply(fb)); + } + + public void testIsNotNullOnIsNullField() { + EsRelation relation = relation(); + var fieldA = getFieldAttribute("a"); + Expression inn = isNotNull(fieldA); + Filter f = new Filter(EMPTY, relation, inn); + + assertEquals(f, new
OptimizerRules.InferIsNotNull().apply(f)); + } + + public void testIsNotNullOnOperatorWithOneField() { + EsRelation relation = relation(); + var fieldA = getFieldAttribute("a"); + Expression inn = isNotNull(new Add(EMPTY, fieldA, ONE)); + Filter f = new Filter(EMPTY, relation, inn); + Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); + + assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); + } + + public void testIsNotNullOnOperatorWithTwoFields() { + EsRelation relation = relation(); + var fieldA = getFieldAttribute("a"); + var fieldB = getFieldAttribute("b"); + Expression inn = isNotNull(new Add(EMPTY, fieldA, fieldB)); + Filter f = new Filter(EMPTY, relation, inn); + Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); + + assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); + } + + public void testIsNotNullOnFunctionWithOneField() { + EsRelation relation = relation(); + var fieldA = getFieldAttribute("a"); + var pattern = L("abc"); + Expression inn = isNotNull( + new And(EMPTY, new TestStartsWith(EMPTY, fieldA, pattern, false), greaterThanOf(new Add(EMPTY, ONE, TWO), THREE)) + ); + + Filter f = new Filter(EMPTY, relation, inn); + Filter expected = new Filter(EMPTY, relation, new And(EMPTY, isNotNull(fieldA), inn)); + + assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); + } + + public void testIsNotNullOnFunctionWithTwoFields() { + EsRelation relation = relation(); + var fieldA = getFieldAttribute("a"); + var fieldB = getFieldAttribute("b"); + var pattern = L("abc"); + Expression inn = isNotNull(new TestStartsWith(EMPTY, fieldA, fieldB, false)); + + Filter f = new Filter(EMPTY, relation, inn); + Filter expected = new Filter(EMPTY, relation, new And(EMPTY, new And(EMPTY, isNotNull(fieldA), isNotNull(fieldB)), inn)); + + assertEquals(expected, new OptimizerRules.InferIsNotNull().apply(f)); + } + + public static class TestStartsWith extends StartsWith { + + public TestStartsWith(Source source, Expression input, Expression pattern, boolean caseInsensitive) { + super(source, input, pattern, caseInsensitive); + } + + @Override + public Expression replaceChildren(List newChildren) { + return new TestStartsWith(source(), newChildren.get(0), newChildren.get(1), isCaseInsensitive()); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, TestStartsWith::new, input(), pattern(), isCaseInsensitive()); + } + } + + public void testIsNotNullOnFunctionWithTwoField() {} + + private IsNotNull isNotNull(Expression field) { + return new IsNotNull(EMPTY, field); + } + private IsNull isNull(Expression field) { + return new IsNull(EMPTY, field); } private Literal nullOf(DataType dataType) { diff --git a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java index a395ac7766b0a..7f5249fe4c2db 100644 --- a/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java +++ b/x-pack/plugin/ql/test-fixtures/src/main/java/org/elasticsearch/xpack/ql/TestUtils.java @@ -74,6 +74,7 @@ import java.util.zip.ZipEntry; import static java.util.Collections.emptyMap; +import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.elasticsearch.test.ESTestCase.between; import static org.elasticsearch.test.ESTestCase.randomAlphaOfLength; import static 
org.elasticsearch.test.ESTestCase.randomBoolean; @@ -313,7 +314,7 @@ public static TestNodes buildNodeAndVersions(RestClient client, String bwcNodesV // this json might be from a node <8.8.0, but about a node >=8.8.0 // In that case the transport_version field won't exist. Just ignore it for now. Version version = Version.fromString(nodeVersion); - if (version.before(Version.V_8_8_0)) { + if (version.before(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { transportVersion = TransportVersion.fromId(version.id); } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index e90ad56e3395a..ed3a3f294c65c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -34,7 +34,6 @@ import org.elasticsearch.search.aggregations.metrics.Min; import org.elasticsearch.search.aggregations.metrics.Sum; import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xpack.core.rollup.RollupField; import java.nio.charset.StandardCharsets; @@ -340,20 +339,15 @@ private static SearchResponse mergeFinalResponse( isTerminatedEarly = isTerminatedEarly && liveResponse.isTerminatedEarly(); numReducePhases += liveResponse.getNumReducePhases(); } - - InternalSearchResponse combinedInternal = new InternalSearchResponse( + // Shard failures are ignored atm, so returning an empty array is fine + return new SearchResponse( SearchHits.EMPTY_WITH_TOTAL_HITS, aggs, null, - null, isTimedOut, isTerminatedEarly, - numReducePhases - ); - - // Shard failures are ignored atm, so returning an empty array is fine - return new SearchResponse( - combinedInternal, + null, + numReducePhases, null, totalShards, sucessfulShards, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java index d6c00e3e89682..e434da37b7585 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestPutRollupJobAction.java @@ -29,7 +29,10 @@ public List routes() { @Override protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { final String id = request.param("id"); - final PutRollupJobAction.Request putRollupJobRequest = PutRollupJobAction.Request.fromXContent(request.contentParser(), id); + final PutRollupJobAction.Request putRollupJobRequest; + try (var parser = request.contentParser()) { + putRollupJobRequest = PutRollupJobAction.Request.fromXContent(parser, id); + } return channel -> client.execute(PutRollupJobAction.INSTANCE, putRollupJobRequest, new RestToXContentListener<>(channel)); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 44f5f51668ea3..7e814230a2223 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ 
b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -72,7 +72,6 @@ import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator.PipelineTree; import org.elasticsearch.search.aggregations.support.AggregationContext; import org.elasticsearch.search.aggregations.support.ValueType; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.InternalAggregationTestCase; import org.elasticsearch.xpack.core.rollup.RollupField; @@ -516,15 +515,13 @@ public void testMismatch() throws IOException { // TODO SearchResponse.Clusters is not public, using null for now. Should fix upstream. MultiSearchResponse.Item unrolledItem = new MultiSearchResponse.Item( new SearchResponse( - new InternalSearchResponse( - null, - InternalAggregations.from(Collections.singletonList(responses.get(0))), - null, - null, - false, - false, - 1 - ), + null, + InternalAggregations.from(Collections.singletonList(responses.get(0))), + null, + false, + false, + null, + 1, null, 1, 1, @@ -537,15 +534,13 @@ public void testMismatch() throws IOException { ); MultiSearchResponse.Item rolledItem = new MultiSearchResponse.Item( new SearchResponse( - new InternalSearchResponse( - null, - InternalAggregations.from(Collections.singletonList(responses.get(1))), - null, - null, - false, - false, - 1 - ), + null, + InternalAggregations.from(Collections.singletonList(responses.get(1))), + null, + false, + false, + null, + 1, null, 1, 1, diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java index 16034354d0ff2..6d7b1d943f10a 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerIndexingTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.action.index.IndexRequest; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.common.Rounding; import org.elasticsearch.common.time.DateFormatter; @@ -866,16 +865,22 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener } catch (IOException e) { listener.onFailure(e); } - SearchResponseSections sections = new SearchResponseSections( + SearchResponse response = new SearchResponse( null, new Aggregations(Collections.singletonList(result)), null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null ); - SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); listener.onResponse(response); } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java index 6fb40541330b2..f858544e4dd2b 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerStateTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchRequest; 
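A pattern recurs throughout these rollup test files: the removed `InternalSearchResponse`/`SearchResponseSections` wrappers are replaced by the flattened `SearchResponse` constructor, and plain `listener.onResponse(response)` calls become `ActionListener.respondAndRelease(...)` because `SearchResponse` is now reference-counted. A rough, self-contained sketch of what respond-and-release means, with toy `Listener`/`RefCounted` types rather than the actual Elasticsearch classes:

```java
interface RefCounted {
    void incRef();
    void decRef(); // the final decRef() frees the underlying resources
}

interface Listener<R> {
    void onResponse(R response);
}

final class Responses {

    private Responses() {}

    // The producer notifies the listener, then drops its own reference. A listener
    // that needs the response after the callback returns must incRef() it first.
    static <R extends RefCounted> void respondAndRelease(Listener<R> listener, R response) {
        try {
            listener.onResponse(response);
        } finally {
            response.decRef();
        }
    }
}
```

This is also why the earlier `AsyncTaskManagementService` hunk passes `ActionListener.running(response::decRef)` into `storeResults`: the stored-results path holds a reference that must be released once persistence finishes.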
import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; @@ -106,16 +105,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; } })); - final SearchResponseSections sections = new SearchResponseSections( + final SearchResponse response = new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + new ShardSearchFailure[0], + null ); - final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, new ShardSearchFailure[0], null); nextPhase.onResponse(response); } @@ -222,8 +227,7 @@ protected void doNextSearch(long waitTimeInNanos, ActionListener } try { - SearchResponse response = searchFunction.apply(buildSearchRequest()); - nextPhase.onResponse(response); + ActionListener.respondAndRelease(nextPhase, searchFunction.apply(buildSearchRequest())); } catch (Exception e) { nextPhase.onFailure(e); } @@ -473,17 +477,25 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; } })); - final SearchResponseSections sections = new SearchResponseSections( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), - aggs, - null, - false, - null, - null, - 1 + ActionListener.respondAndRelease( + nextPhase, + new SearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), + aggs, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ) ); - final SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); - nextPhase.onResponse(response); } @Override @@ -684,16 +696,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; } })); - final SearchResponseSections sections = new SearchResponseSections( + return new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null ); - return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -808,16 +826,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; } })); - final SearchResponseSections sections = new SearchResponseSections( + return new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null ); - return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> new BulkResponse(new BulkItemResponse[0], 100); @@ -981,16 +1005,22 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return null; } })); - final SearchResponseSections sections = new SearchResponseSections( + return new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0), aggs, null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + 
ShardSearchFailure.EMPTY_ARRAY, + null ); - return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); }; Function bulkFunction = bulkRequest -> { diff --git a/x-pack/plugin/searchable-snapshots/qa/url/build.gradle b/x-pack/plugin/searchable-snapshots/qa/url/build.gradle index 12fc0873958e1..850fe85ece3cd 100644 --- a/x-pack/plugin/searchable-snapshots/qa/url/build.gradle +++ b/x-pack/plugin/searchable-snapshots/qa/url/build.gradle @@ -1,12 +1,11 @@ import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE -apply plugin: 'elasticsearch.legacy-java-rest-test' +apply plugin: 'elasticsearch.internal-java-rest-test' apply plugin: 'elasticsearch.rest-resources' -final Project fixture = project(':test:fixtures:nginx-fixture') - dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('searchable-snapshots')))) + javaRestTestImplementation project(':test:fixtures:url-fixture') } restResources { @@ -15,34 +14,6 @@ restResources { } } -apply plugin: 'elasticsearch.test.fixtures' -testFixtures.useFixture(fixture.path, 'nginx-fixture') - -def fixtureAddress = { fixtureName -> - int ephemeralPort = fixture.postProcessFixture.ext."test.fixtures.${fixtureName}.tcp.80" - assert ephemeralPort > 0 - 'http://127.0.0.1:' + ephemeralPort -} - -File repositoryDir = fixture.fsRepositoryDir as File - tasks.named("javaRestTest").configure { - dependsOn fixture.getTasks().named("postProcessFixture") - - nonInputProperties.systemProperty 'test.url.fs.repo.dir', repositoryDir.absolutePath - nonInputProperties.systemProperty 'test.url.http', "${-> fixtureAddress('nginx-fixture')}" -} - -testClusters.matching { it.name == "javaRestTest" }.configureEach { - testDistribution = 'DEFAULT' - setting 'path.repo', repositoryDir.absolutePath, IGNORE_VALUE - setting 'repositories.url.allowed_urls', { "${-> fixtureAddress('nginx-fixture')}" }, IGNORE_VALUE - - setting 'xpack.license.self_generated.type', 'trial' - - setting 'xpack.searchable.snapshot.shared_cache.size', '16MB' - setting 'xpack.searchable.snapshot.shared_cache.region_size', '256KB' - setting 'xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive', '0ms' - - setting 'xpack.security.enabled', 'false' + usesDefaultDistribution() } diff --git a/x-pack/plugin/searchable-snapshots/qa/url/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/URLSearchableSnapshotsIT.java b/x-pack/plugin/searchable-snapshots/qa/url/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/URLSearchableSnapshotsIT.java index b37b71cf95a31..b59dcb3a9d210 100644 --- a/x-pack/plugin/searchable-snapshots/qa/url/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/URLSearchableSnapshotsIT.java +++ b/x-pack/plugin/searchable-snapshots/qa/url/src/javaRestTest/java/org/elasticsearch/xpack/searchablesnapshots/URLSearchableSnapshotsIT.java @@ -7,14 +7,37 @@ package org.elasticsearch.xpack.searchablesnapshots; +import fixture.url.URLFixture; + import org.elasticsearch.common.settings.Settings; import org.elasticsearch.repositories.fs.FsRepository; +import org.elasticsearch.test.cluster.ElasticsearchCluster; +import org.elasticsearch.test.cluster.local.distribution.DistributionType; +import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; import static org.hamcrest.Matchers.blankOrNullString; import static org.hamcrest.Matchers.not; public class URLSearchableSnapshotsIT extends AbstractSearchableSnapshotsRestTestCase { + public static URLFixture 
urlFixture = new URLFixture(); + + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + .distribution(DistributionType.DEFAULT) + .setting("xpack.license.self_generated.type", "trial") + .setting("repositories.url.allowed_urls", () -> urlFixture.getAddress()) + .setting("path.repo", () -> urlFixture.getRepositoryDir()) + .setting("xpack.searchable.snapshot.shared_cache.size", "16MB") + .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") + .setting("xpack.searchable_snapshots.cache_fetch_async_thread_pool.keep_alive", "0ms") + .setting("xpack.security.enabled", "false") + .build(); + + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(urlFixture).around(cluster); + @Override protected String writeRepositoryType() { return FsRepository.TYPE; @@ -22,7 +45,7 @@ protected String writeRepositoryType() { @Override protected Settings writeRepositorySettings() { - final String repoDirectory = System.getProperty("test.url.fs.repo.dir"); + final String repoDirectory = urlFixture.getRepositoryDir(); assertThat(repoDirectory, not(blankOrNullString())); return Settings.builder().put("location", repoDirectory).build(); @@ -40,9 +63,14 @@ protected String readRepositoryType() { @Override protected Settings readRepositorySettings() { - final String url = System.getProperty("test.url.http"); + final String url = urlFixture.getAddress(); assertThat(url, not(blankOrNullString())); return Settings.builder().put("url", url).build(); } + + @Override + protected String getTestRestCluster() { + return cluster.getHttpAddresses(); + } } diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java index 5ef524f8211c1..18b4e6ed7cb31 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/FrozenSearchableSnapshotsIntegTests.java @@ -208,12 +208,12 @@ public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { for (ShardStats shardStats : indicesStatsResponse.getShards()) { StoreStats store = shardStats.getStats().getStore(); - assertThat(shardStats.getShardRouting().toString(), store.getReservedSize().getBytes(), equalTo(0L)); - assertThat(shardStats.getShardRouting().toString(), store.getSize().getBytes(), equalTo(0L)); + assertThat(shardStats.getShardRouting().toString(), store.reservedSizeInBytes(), equalTo(0L)); + assertThat(shardStats.getShardRouting().toString(), store.sizeInBytes(), equalTo(0L)); } if (indicesStatsResponse.getShards().length > 0) { - assertThat(indicesStatsResponse.getTotal().getStore().getReservedSize().getBytes(), equalTo(0L)); - assertThat(indicesStatsResponse.getTotal().getStore().getSize().getBytes(), equalTo(0L)); + assertThat(indicesStatsResponse.getTotal().getStore().reservedSizeInBytes(), equalTo(0L)); + assertThat(indicesStatsResponse.getTotal().getStore().sizeInBytes(), equalTo(0L)); } } }, "test-stats-watcher"); @@ -251,8 +251,8 @@ public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { StoreStats store = shardStats.getStats().getStore(); final ShardRouting shardRouting = shardStats.getShardRouting(); - 
assertThat(shardRouting.toString(), store.getReservedSize().getBytes(), equalTo(0L)); - assertThat(shardRouting.toString(), store.getSize().getBytes(), equalTo(0L)); + assertThat(shardRouting.toString(), store.reservedSizeInBytes(), equalTo(0L)); + assertThat(shardRouting.toString(), store.sizeInBytes(), equalTo(0L)); // the original shard size from the snapshot final long originalSize = snapshotShards.get(shardRouting.getId()).getStats().getTotalSize(); @@ -273,11 +273,11 @@ public void testCreateAndRestorePartialSearchableSnapshot() throws Exception { final ByteBuffersDirectory inMemoryDir = (ByteBuffersDirectory) unwrappedDir; assertThat(inMemoryDir.listAll(), arrayWithSize(1)); - assertThat(shardRouting.toString(), store.getTotalDataSetSize().getBytes(), equalTo(originalSize)); + assertThat(shardRouting.toString(), store.totalDataSetSizeInBytes(), equalTo(originalSize)); } final StoreStats store = indicesStatsResponse.getTotal().getStore(); - assertThat(store.getTotalDataSetSize().getBytes(), equalTo(totalExpectedSize)); + assertThat(store.totalDataSetSizeInBytes(), equalTo(totalExpectedSize)); statsWatcherRunning.set(false); statsWatcher.join(); diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java index c80cf3c3d62e3..e3b631ba69c8a 100644 --- a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/RetrySearchIntegTests.java @@ -143,23 +143,18 @@ public void testRetryPointInTime() throws Exception { ).keepAlive(TimeValue.timeValueMinutes(2)); final String pitId = client().execute(TransportOpenPointInTimeAction.TYPE, openRequest).actionGet().getPointInTimeId(); try { - assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName).setPreference(null).setPointInTime(new PointInTimeBuilder(pitId)), - resp -> { - assertThat(resp.pointInTimeId(), equalTo(pitId)); - assertHitCount(resp, docCount); - } - ); + assertNoFailuresAndResponse(prepareSearch().setPointInTime(new PointInTimeBuilder(pitId)), resp -> { + assertThat(resp.pointInTimeId(), equalTo(pitId)); + assertHitCount(resp, docCount); + }); final Set allocatedNodes = internalCluster().nodesInclude(indexName); for (String allocatedNode : allocatedNodes) { internalCluster().restartNode(allocatedNode); } ensureGreen(indexName); assertNoFailuresAndResponse( - prepareSearch().setIndices(indexName) - .setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) + prepareSearch().setQuery(new RangeQueryBuilder("created_date").gte("2011-01-01").lte("2011-12-12")) .setSearchType(SearchType.QUERY_THEN_FETCH) - .setPreference(null) .setPreFilterShardSize(between(1, 10)) .setAllowPartialSearchResults(true) .setPointInTime(new PointInTimeBuilder(pitId)), diff --git a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java index 876ff9ebdb86f..38222f64b282b 100644 --- 
a/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java +++ b/x-pack/plugin/searchable-snapshots/src/internalClusterTest/java/org/elasticsearch/xpack/searchablesnapshots/SearchableSnapshotsIntegTests.java @@ -498,7 +498,7 @@ private Map getMaxShardSizeByNodeInBytes(String indexName) { IndexStats indexStats = indicesStats.getIndex(indexName); Map maxShardSizeByNode = new HashMap<>(); for (ShardStats shard : indexStats.getShards()) { - long sizeInBytes = shard.getStats().getStore().getSizeInBytes(); + long sizeInBytes = shard.getStats().getStore().sizeInBytes(); if (sizeInBytes > 0) { maxShardSizeByNode.compute( shard.getShardRouting().currentNodeId(), diff --git a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java index 89cab65765bf9..1713be9feac65 100644 --- a/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java +++ b/x-pack/plugin/searchable-snapshots/src/main/java/org/elasticsearch/xpack/searchablesnapshots/cache/blob/BlobStoreCacheMaintenanceService.java @@ -10,10 +10,10 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRunnable; import org.elasticsearch.action.bulk.BulkAction; import org.elasticsearch.action.bulk.BulkItemResponse; import org.elasticsearch.action.bulk.BulkRequest; -import org.elasticsearch.action.bulk.BulkResponse; import org.elasticsearch.action.delete.DeleteRequest; import org.elasticsearch.action.delete.DeleteResponse; import org.elasticsearch.action.search.ClosePointInTimeRequest; @@ -25,6 +25,7 @@ import org.elasticsearch.action.search.TransportClosePointInTimeAction; import org.elasticsearch.action.search.TransportOpenPointInTimeAction; import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.support.RefCountingListener; import org.elasticsearch.action.support.TransportActions; import org.elasticsearch.client.internal.Client; import org.elasticsearch.client.internal.OriginSettingClient; @@ -37,16 +38,16 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.service.ClusterService; -import org.elasticsearch.common.Strings; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; +import org.elasticsearch.common.util.concurrent.ThrottledTaskRunner; +import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Nullable; -import org.elasticsearch.core.Releasable; -import org.elasticsearch.core.Releasables; +import org.elasticsearch.core.RefCounted; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.Index; @@ -73,9 +74,8 @@ import java.util.Objects; import java.util.Queue; import java.util.Set; -import 
java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import java.util.concurrent.atomic.AtomicReference; +import java.util.function.BiConsumer; import java.util.stream.Collectors; import java.util.stream.IntStream; @@ -88,10 +88,10 @@ /** * A service that delete documents in the snapshot blob cache index when they are not required anymore. - * + *
<p>
* This service runs on the data node that contains the snapshot blob cache primary shard. It listens to cluster state updates to find * searchable snapshot indices that are deleted and checks if the index snapshot is still used by other searchable snapshot indices. If the - * index snapshot is not used anymore then i triggers the deletion of corresponding cached blobs in the snapshot blob cache index using a + * index snapshot is not used anymore then it triggers the deletion of corresponding cached blobs in the snapshot blob cache index using a * delete-by-query. */ public class BlobStoreCacheMaintenanceService implements ClusterStateListener { @@ -266,6 +266,10 @@ private static boolean hasSearchableSnapshotWith(final ClusterState state, final return false; } + private static Instant getExpirationTime(TimeValue retention, ThreadPool threadPool) { + return Instant.ofEpochMilli(threadPool.absoluteTimeInMillis()).minus(retention.duration(), retention.timeUnit().toChronoUnit()); + } + private static Map> listSearchableSnapshots(final ClusterState state) { Map> snapshots = null; for (IndexMetadata indexMetadata : state.metadata()) { @@ -396,315 +400,264 @@ public void onFailure(Exception e) { /** * A maintenance task that periodically cleans up unused cache entries from the blob store cache index. - * + *
<p>
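The new getExpirationTime helper turns the retention TimeValue into an absolute cutoff Instant. The same computation with plain java.time types, where the 30-day retention is an illustrative value and Instant.now() stands in for threadPool.absoluteTimeInMillis():

[source,java]
----
import java.time.Instant;
import java.time.temporal.ChronoUnit;

public class ExpirationCutoff {
    public static void main(String[] args) {
        long retentionDuration = 30;                // hypothetical retention period
        ChronoUnit retentionUnit = ChronoUnit.DAYS; // what TimeValue#timeUnit().toChronoUnit() would yield
        // cache entries created before this instant are old enough to delete
        Instant expirationTime = Instant.now().minus(retentionDuration, retentionUnit);
        System.out.println("delete entries created before " + expirationTime);
    }
}
----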
* This task first opens a point-in-time context on the blob store cache system index and uses it to search all documents. For each * document found the task verifies if it belongs to an existing searchable snapshot index. If the doc does not belong to any * index then it is deleted as part of a bulk request. Once the bulk is executed the next batch of documents is searched for. Once * all documents from the PIT have been verified the task closes the PIT and completes itself. - * + *
<p>
* The task executes every step (PIT opening, searches, bulk deletes, PIT closing) using the generic thread pool. * The same task instance is used for all the steps and makes sure that a closed instance is not executed again. */ - private class PeriodicMaintenanceTask implements Runnable, Releasable { - + private class PeriodicMaintenanceTask implements Runnable { private final TimeValue keepAlive; private final int batchSize; - private final AtomicReference error = new AtomicReference<>(); - private final AtomicBoolean closed = new AtomicBoolean(); + private final ThrottledTaskRunner taskRunner; private final AtomicLong deletes = new AtomicLong(); private final AtomicLong total = new AtomicLong(); - private volatile Map> existingSnapshots; - private volatile Set existingRepositories; - private final AtomicReference searchResponse = new AtomicReference<>(); - private volatile Instant expirationTime; - private volatile String pointIntTimeId; - private volatile Object[] searchAfter; - PeriodicMaintenanceTask(TimeValue keepAlive, int batchSize) { this.keepAlive = keepAlive; this.batchSize = batchSize; + this.taskRunner = new ThrottledTaskRunner(this.getClass().getCanonicalName(), 2, threadPool.generic()); } @Override public void run() { - assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GENERIC); - try { - ensureOpen(); - if (pointIntTimeId == null) { - final OpenPointInTimeRequest openRequest = new OpenPointInTimeRequest(SNAPSHOT_BLOB_CACHE_INDEX); - openRequest.keepAlive(keepAlive); - clientWithOrigin.execute(TransportOpenPointInTimeAction.TYPE, openRequest, new ActionListener<>() { - @Override - public void onResponse(OpenPointInTimeResponse response) { - logger.trace("periodic maintenance task initialized with point-in-time id [{}]", response.getPointInTimeId()); - PeriodicMaintenanceTask.this.pointIntTimeId = response.getPointInTimeId(); - executeNext(PeriodicMaintenanceTask.this); - } - - @Override - public void onFailure(Exception e) { - if (TransportActions.isShardNotAvailableException(e)) { - complete(null); - } else { - complete(e); - } - } - }); - return; + ActionListener.run(ActionListener.runAfter(new ActionListener() { + @Override + public void onResponse(Void unused) { + logger.info( + () -> format( + "periodic maintenance task completed (%s deleted documents out of a total of %s)", + deletes.get(), + total.get() + ) + ); } - final String pitId = pointIntTimeId; - assert Strings.hasLength(pitId); - - SearchResponse searchResponseRef; - do { - searchResponseRef = searchResponse.get(); - if (searchResponseRef == null) { - handleMissingSearchResponse(pitId); - return; + @Override + public void onFailure(Exception e) { + logger.warn( + () -> format( + "periodic maintenance task completed with failure (%s deleted documents out of a total of %s)", + deletes.get(), + total.get() + ), + e + ); + } + }, BlobStoreCacheMaintenanceService.this::startPeriodicTask), listener -> { + final OpenPointInTimeRequest openRequest = new OpenPointInTimeRequest(SNAPSHOT_BLOB_CACHE_INDEX); + openRequest.keepAlive(keepAlive); + clientWithOrigin.execute(TransportOpenPointInTimeAction.TYPE, openRequest, new ActionListener<>() { + @Override + public void onResponse(OpenPointInTimeResponse response) { + logger.trace("periodic maintenance task initialized with point-in-time id [{}]", response.getPointInTimeId()); + threadPool.generic().execute(ActionRunnable.wrap(listener, l -> { + final ClusterState state = clusterService.state(); + new RunningPeriodicMaintenanceTask( + response.getPointInTimeId(), 
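The control flow above hinges on wrapping the completion listener so that the outcome is logged and the next periodic run is always rescheduled, whichever path completes it. A toy, dependency-free rendering of that runAfter composition; the Listener type below is a stand-in, not the Elasticsearch ActionListener API:

[source,java]
----
import java.util.function.Consumer;

public class RunAfterSketch {
    // minimal stand-in for an ActionListener: one callback for success, one for failure
    record Listener<T>(Consumer<T> onResponse, Consumer<Exception> onFailure) {
        // wrap a listener so that `after` runs once the delegate completes, on either path
        static <T> Listener<T> runAfter(Listener<T> delegate, Runnable after) {
            return new Listener<>(response -> {
                try {
                    delegate.onResponse().accept(response);
                } finally {
                    after.run();
                }
            }, e -> {
                try {
                    delegate.onFailure().accept(e);
                } finally {
                    after.run();
                }
            });
        }
    }

    public static void main(String[] args) {
        Listener<Void> logging = new Listener<>(
            r -> System.out.println("periodic maintenance task completed"),
            e -> System.out.println("periodic maintenance task failed: " + e)
        );
        Listener<Void> wrapped = Listener.runAfter(logging, () -> System.out.println("starting next periodic task"));
        wrapped.onResponse().accept(null); // logs completion, then the reschedule message
    }
}
----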
+ closingPitBefore(clientWithOrigin, response.getPointInTimeId(), l), + getExpirationTime(periodicTaskRetention, threadPool), + // compute the list of existing searchable snapshots and repositories up-front + listSearchableSnapshots(state), + RepositoriesMetadata.get(state) + .repositories() + .stream() + .map(RepositoryMetadata::name) + .collect(Collectors.toSet()) + ).run(); + })); } - } while (searchResponseRef.tryIncRef() == false); - try { - var searchHits = searchResponseRef.getHits().getHits(); - if (searchHits != null && searchHits.length > 0) { - updateWithSearchHits(searchHits); - return; + + @Override + public void onFailure(Exception e) { + if (TransportActions.isShardNotAvailableException(e)) { + listener.onResponse(null); + } else { + listener.onFailure(e); + } } - } finally { - searchResponseRef.decRef(); + }); + }); + } + + private static ActionListener closingPitBefore(Client client, String pointInTimeId, ActionListener listener) { + return new ActionListener<>() { + @Override + public void onResponse(Void unused) { + closePit(client, pointInTimeId, () -> listener.onResponse(null)); } - // we're done, complete the task - complete(null); - } catch (Exception e) { - complete(e); - } + + @Override + public void onFailure(Exception e) { + closePit(client, pointInTimeId, () -> listener.onFailure(e)); + } + }; } - private void handleMissingSearchResponse(String pitId) { - final SearchSourceBuilder searchSource = new SearchSourceBuilder(); - searchSource.fetchField(new FieldAndFormat(CachedBlob.CREATION_TIME_FIELD, "epoch_millis")); - searchSource.fetchSource(false); - searchSource.trackScores(false); - searchSource.sort(ShardDocSortField.NAME); - searchSource.size(batchSize); - if (searchAfter != null) { - searchSource.searchAfter(searchAfter); - searchSource.trackTotalHits(false); - } else { - searchSource.trackTotalHits(true); - } - final PointInTimeBuilder pointInTime = new PointInTimeBuilder(pitId); - pointInTime.setKeepAlive(keepAlive); - searchSource.pointInTimeBuilder(pointInTime); - final SearchRequest searchRequest = new SearchRequest(); - searchRequest.source(searchSource); - clientWithOrigin.execute(TransportSearchAction.TYPE, searchRequest, new ActionListener<>() { + private static void closePit(Client client, String pointInTimeId, Runnable onCompletion) { + client.execute(TransportClosePointInTimeAction.TYPE, new ClosePointInTimeRequest(pointInTimeId), new ActionListener<>() { @Override - public void onResponse(SearchResponse response) { - if (searchAfter == null) { - assert PeriodicMaintenanceTask.this.total.get() == 0L; - PeriodicMaintenanceTask.this.total.set(response.getHits().getTotalHits().value); + public void onResponse(ClosePointInTimeResponse response) { + if (response.isSucceeded()) { + logger.debug("periodic maintenance task successfully closed point-in-time id [{}]", pointInTimeId); + } else { + logger.debug("point-in-time id [{}] not found", pointInTimeId); } - PeriodicMaintenanceTask.this.setCurrentResponse(response); - PeriodicMaintenanceTask.this.searchAfter = null; - executeNext(PeriodicMaintenanceTask.this); + onCompletion.run(); } @Override public void onFailure(Exception e) { - complete(e); + logger.warn(() -> "failed to close point-in-time id [" + pointInTimeId + "]", e); + onCompletion.run(); } }); } - private void updateWithSearchHits(SearchHit[] searchHits) { - if (expirationTime == null) { - final TimeValue retention = periodicTaskRetention; - expirationTime = Instant.ofEpochMilli(threadPool.absoluteTimeInMillis()) - 
.minus(retention.duration(), retention.timeUnit().toChronoUnit()); - - final ClusterState state = clusterService.state(); - // compute the list of existing searchable snapshots and repositories once - existingSnapshots = listSearchableSnapshots(state); - existingRepositories = RepositoriesMetadata.get(state) - .repositories() - .stream() - .map(RepositoryMetadata::name) - .collect(Collectors.toSet()); + /** + * The maintenance task, once it has opened its PIT and started running so that it has all the state it needs to do its job. + */ + private class RunningPeriodicMaintenanceTask implements Runnable { + private final String pointInTimeId; + private final RefCountingListener listeners; + private final Instant expirationTime; + private final Map> existingSnapshots; + private final Set existingRepositories; + + RunningPeriodicMaintenanceTask( + String pointInTimeId, + ActionListener listener, + Instant expirationTime, + Map> existingSnapshots, + Set existingRepositories + ) { + this.pointInTimeId = pointInTimeId; + this.listeners = new RefCountingListener(listener); + this.expirationTime = expirationTime; + this.existingSnapshots = existingSnapshots; + this.existingRepositories = existingRepositories; } - final BulkRequest bulkRequest = new BulkRequest(); - final Map> knownSnapshots = existingSnapshots; - assert knownSnapshots != null; - final Set knownRepositories = existingRepositories; - assert knownRepositories != null; - final Instant expirationTimeCopy = this.expirationTime; - assert expirationTimeCopy != null; - - Object[] lastSortValues = null; - for (SearchHit searchHit : searchHits) { - lastSortValues = searchHit.getSortValues(); - assert searchHit.getId() != null; - try { - boolean delete = false; - - // See {@link BlobStoreCacheService#generateId} - // doc id = {repository name}/{snapshot id}/{snapshot index id}/{shard id}/{file name}/@{file offset} - final String[] parts = Objects.requireNonNull(searchHit.getId()).split("/"); - assert parts.length == 6 : Arrays.toString(parts) + " vs " + searchHit.getId(); - - final String repositoryName = parts[0]; - if (knownRepositories.contains(repositoryName) == false) { - logger.trace("deleting blob store cache entry with id [{}]: repository does not exist", searchHit.getId()); - delete = true; - } else { - final Set knownIndexIds = knownSnapshots.get(parts[1]); - if (knownIndexIds == null || knownIndexIds.contains(parts[2]) == false) { - logger.trace("deleting blob store cache entry with id [{}]: not used", searchHit.getId()); - delete = true; - } - } - if (delete) { - final Instant creationTime = getCreationTime(searchHit); - if (creationTime.isAfter(expirationTimeCopy)) { - logger.trace("blob store cache entry with id [{}] was created recently, skipping deletion", searchHit.getId()); - continue; - } - bulkRequest.add(new DeleteRequest().index(searchHit.getIndex()).id(searchHit.getId())); - } - } catch (Exception e) { - logger.warn(() -> format("exception when parsing blob store cache entry with id [%s], skipping", searchHit.getId()), e); + @Override + public void run() { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GENERIC); + try (listeners) { + executeSearch(new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(true)), (searchResponse, refs) -> { + assert total.get() == 0L; + total.set(searchResponse.getHits().getTotalHits().value); + handleSearchResponse(searchResponse, refs); + }); } } - assert lastSortValues != null; - if (bulkRequest.numberOfActions() == 0) { - setCurrentResponse(null); - 
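The rewritten task pages through the cache index with a point-in-time reader plus search_after: each batch is requested with the sort values of the previous batch's last hit, until an empty batch signals the end. The loop shape, reduced to an in-memory stand-in with no Elasticsearch types:

[source,java]
----
import java.util.List;
import java.util.stream.IntStream;

public class SearchAfterLoop {
    public static void main(String[] args) {
        List<Integer> allDocs = IntStream.range(0, 10).boxed().toList(); // stand-in for the PIT's frozen view
        int batchSize = 3;
        Integer searchAfter = null; // null means first page; otherwise the last hit's sort value
        while (true) {
            final Integer after = searchAfter;
            List<Integer> batch = allDocs.stream()
                .filter(doc -> after == null || doc > after) // the search_after condition on the sort key
                .limit(batchSize)
                .toList();
            if (batch.isEmpty()) {
                break; // no more hits: time to close the PIT and finish up
            }
            System.out.println("processing batch " + batch);
            searchAfter = batch.get(batch.size() - 1); // carry the last sort value into the next request
        }
    }
}
----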
this.searchAfter = lastSortValues; - executeNext(this); - return; + private void executeSearch(SearchRequest searchRequest, BiConsumer responseConsumer) { + clientWithOrigin.execute(TransportSearchAction.TYPE, searchRequest, listeners.acquire(searchResponse -> { + searchResponse.mustIncRef(); + taskRunner.enqueueTask(ActionListener.runAfter(listeners.acquire(ref -> { + final var refs = AbstractRefCounted.of(ref::close); + try { + responseConsumer.accept(searchResponse, refs); + } finally { + refs.decRef(); + } + }), searchResponse::decRef)); + })); + } + + private SearchSourceBuilder getSearchSourceBuilder() { + return new SearchSourceBuilder().fetchField(new FieldAndFormat(CachedBlob.CREATION_TIME_FIELD, "epoch_millis")) + .fetchSource(false) + .trackScores(false) + .sort(ShardDocSortField.NAME) + .size(batchSize) + .pointInTimeBuilder(new PointInTimeBuilder(pointInTimeId).setKeepAlive(keepAlive)); } - final Object[] finalSearchAfter = lastSortValues; - clientWithOrigin.execute(BulkAction.INSTANCE, bulkRequest, new ActionListener<>() { - @Override - public void onResponse(BulkResponse response) { - for (BulkItemResponse itemResponse : response.getItems()) { - if (itemResponse.isFailed() == false) { - assert itemResponse.getResponse() instanceof DeleteResponse; - deletes.incrementAndGet(); - } - } - PeriodicMaintenanceTask.this.setCurrentResponse(null); - PeriodicMaintenanceTask.this.searchAfter = finalSearchAfter; - executeNext(PeriodicMaintenanceTask.this); + private void handleSearchResponse(SearchResponse searchResponse, RefCounted refs) { + assert ThreadPool.assertCurrentThreadPool(ThreadPool.Names.GENERIC); + + if (listeners.isFailing()) { + return; } - @Override - public void onFailure(Exception e) { - complete(e); + final var searchHits = searchResponse.getHits().getHits(); + if (searchHits == null || searchHits.length == 0) { + return; } - }); - } - public boolean isClosed() { - return closed.get(); - } + final BulkRequest bulkRequest = new BulkRequest(); - private void ensureOpen() { - if (isClosed()) { - assert false : "should not use periodic task after close"; - throw new IllegalStateException("Periodic maintenance task is closed"); - } - } + Object[] lastSortValues = null; + for (SearchHit searchHit : searchHits) { + lastSortValues = searchHit.getSortValues(); + assert searchHit.getId() != null; + try { + boolean delete = false; - @Override - public void close() { - if (closed.compareAndSet(false, true)) { - setCurrentResponse(null); - final Exception e = error.get(); - if (e != null) { - logger.warn( - () -> format( - "periodic maintenance task completed with failure (%s deleted documents out of a total of %s)", - deletes.get(), - total.get() - ), - e - ); - } else { - logger.info( - () -> format( - "periodic maintenance task completed (%s deleted documents out of a total of %s)", - deletes.get(), - total.get() - ) - ); - } - } - } + // See {@link BlobStoreCacheService#generateId} + // doc id = {repository name}/{snapshot id}/{snapshot index id}/{shard id}/{file name}/@{file offset} + final String[] parts = Objects.requireNonNull(searchHit.getId()).split("/"); + assert parts.length == 6 : Arrays.toString(parts) + " vs " + searchHit.getId(); - private void complete(@Nullable Exception failure) { - assert isClosed() == false; - final Releasable releasable = () -> { - try { - final Exception previous = error.getAndSet(failure); - assert previous == null : "periodic maintenance task already failed: " + previous; - close(); - } finally { - startPeriodicTask(); + final String 
repositoryName = parts[0]; + if (existingRepositories.contains(repositoryName) == false) { + logger.trace("deleting blob store cache entry with id [{}]: repository does not exist", searchHit.getId()); + delete = true; + } else { + final Set knownIndexIds = existingSnapshots.get(parts[1]); + if (knownIndexIds == null || knownIndexIds.contains(parts[2]) == false) { + logger.trace("deleting blob store cache entry with id [{}]: not used", searchHit.getId()); + delete = true; + } + } + if (delete) { + final Instant creationTime = getCreationTime(searchHit); + if (creationTime.isAfter(expirationTime)) { + logger.trace( + "blob store cache entry with id [{}] was created recently, skipping deletion", + searchHit.getId() + ); + continue; + } + bulkRequest.add(new DeleteRequest().index(searchHit.getIndex()).id(searchHit.getId())); + } + } catch (Exception e) { + logger.warn( + () -> format("exception when parsing blob store cache entry with id [%s], skipping", searchHit.getId()), + e + ); + } } - }; - boolean waitForRelease = false; - try { - final String pitId = pointIntTimeId; - if (Strings.hasLength(pitId)) { - final ClosePointInTimeRequest closeRequest = new ClosePointInTimeRequest(pitId); + + if (bulkRequest.numberOfActions() > 0) { + refs.mustIncRef(); clientWithOrigin.execute( - TransportClosePointInTimeAction.TYPE, - closeRequest, - ActionListener.runAfter(new ActionListener<>() { - @Override - public void onResponse(ClosePointInTimeResponse response) { - if (response.isSucceeded()) { - logger.debug("periodic maintenance task successfully closed point-in-time id [{}]", pitId); - } else { - logger.debug("point-in-time id [{}] not found", pitId); + BulkAction.INSTANCE, + bulkRequest, + ActionListener.releaseAfter(listeners.acquire(bulkResponse -> { + for (BulkItemResponse itemResponse : bulkResponse.getItems()) { + if (itemResponse.isFailed() == false) { + assert itemResponse.getResponse() instanceof DeleteResponse; + deletes.incrementAndGet(); } } - - @Override - public void onFailure(Exception e) { - logger.warn(() -> "failed to close point-in-time id [" + pitId + "]", e); - } - }, () -> Releasables.close(releasable)) + }), refs::decRef) ); - waitForRelease = true; - } - } finally { - if (waitForRelease == false) { - Releasables.close(releasable); } - } - } - private void setCurrentResponse(SearchResponse response) { - if (response != null) { - response.mustIncRef(); - } - var previous = searchResponse.getAndSet(response); - if (previous != null) { - previous.decRef(); + assert lastSortValues != null; + executeSearch( + new SearchRequest().source(getSearchSourceBuilder().trackTotalHits(false).searchAfter(lastSortValues)), + this::handleSearchResponse + ); } } } - private void executeNext(PeriodicMaintenanceTask maintenanceTask) { - threadPool.generic().execute(maintenanceTask); - } - private static Instant getCreationTime(SearchHit searchHit) { final DocumentField creationTimeField = searchHit.field(CachedBlob.CREATION_TIME_FIELD); assert creationTimeField != null; diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java index 9c2fd118d59d9..52d87c2e32c87 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtRestIT.java @@ 
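The delete-or-keep decision relies on the blob-cache document id layout, {repository name}/{snapshot id}/{snapshot index id}/{shard id}/{file name}/@{file offset}. A small sketch of the parse-and-check step, against hypothetical repository and snapshot sets:

[source,java]
----
import java.util.Map;
import java.util.Set;

public class CacheEntryCheck {
    public static void main(String[] args) {
        // doc id = {repository name}/{snapshot id}/{snapshot index id}/{shard id}/{file name}/@{file offset}
        String docId = "my-repo/snap-uuid/index-uuid/0/_0.cfs/@4096";
        Set<String> existingRepositories = Set.of("my-repo");                                // hypothetical
        Map<String, Set<String>> existingSnapshots = Map.of("snap-uuid", Set.of("other-index-uuid"));

        String[] parts = docId.split("/");
        assert parts.length == 6 : docId;
        boolean delete;
        if (existingRepositories.contains(parts[0]) == false) {
            delete = true; // repository no longer exists
        } else {
            Set<String> knownIndexIds = existingSnapshots.get(parts[1]);
            delete = knownIndexIds == null || knownIndexIds.contains(parts[2]) == false; // snapshot not used
        }
        System.out.println(delete ? "stale entry, eligible for deletion" : "still in use, keep");
    }
}
----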
-89,32 +89,39 @@ public class JwtRestIT extends ESRestTestCase { put("xpack.security.authc.realms.jwt.jwt2.client_authentication.shared_secret", VALID_SHARED_SECRET); } }; + private static final String KEYSTORE_PASSWORD = "keystore-password"; @ClassRule public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .nodes(2) .distribution(DistributionType.DEFAULT) + .keystorePassword(KEYSTORE_PASSWORD) .configFile("http.key", Resource.fromClasspath("ssl/http.key")) .configFile("http.crt", Resource.fromClasspath("ssl/http.crt")) .configFile("ca.crt", Resource.fromClasspath("ssl/ca.crt")) + .configFile("ca-transport.crt", Resource.fromClasspath("ssl/ca-transport.crt")) + .configFile("transport.key", Resource.fromClasspath("ssl/transport.key")) + .configFile("transport.crt", Resource.fromClasspath("ssl/transport.crt")) .configFile("rsa.jwkset", Resource.fromClasspath("jwk/rsa-public-jwkset.json")) .setting("xpack.ml.enabled", "false") .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") - .setting("xpack.security.http.ssl.enabled", "true") - .setting("xpack.security.transport.ssl.enabled", "false") + .setting("xpack.security.transport.ssl.enabled", "true") + .setting("xpack.security.transport.ssl.certificate", "transport.crt") + .setting("xpack.security.transport.ssl.key", "transport.key") + .setting("xpack.security.transport.ssl.certificate_authorities", "ca-transport.crt") .setting("xpack.security.authc.token.enabled", "true") .setting("xpack.security.authc.api_key.enabled", "true") - .setting("xpack.security.http.ssl.enabled", "true") .setting("xpack.security.http.ssl.certificate", "http.crt") .setting("xpack.security.http.ssl.key", "http.key") - .setting("xpack.security.http.ssl.key_passphrase", "http-password") .setting("xpack.security.http.ssl.certificate_authorities", "ca.crt") .setting("xpack.security.http.ssl.client_authentication", "optional") .settings(JwtRestIT::realmSettings) .keystore("xpack.security.authc.realms.jwt.jwt2.hmac_key", HMAC_PASSPHRASE) .keystore("xpack.security.authc.realms.jwt.jwt3.hmac_jwkset", HMAC_JWKSET) + .keystore("xpack.security.http.ssl.secure_key_passphrase", "http-password") + .keystore("xpack.security.transport.ssl.secure_key_passphrase", "transport-password") .keystore("xpack.security.authc.realms.jwt.jwt3.client_authentication.shared_secret", VALID_SHARED_SECRET) .keystore(keystoreSettings) .user("admin_user", "admin-password") @@ -508,7 +515,6 @@ public void testAuthenticationFailureIfDelegatedAuthorizationFails() throws Exce } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103308") public void testReloadClientSecret() throws Exception { final String principal = SERVICE_SUBJECT.get(); final String username = getUsernameFromPrincipal(principal); @@ -562,7 +568,9 @@ private void writeSettingToKeystoreThenReload(String setting, @Nullable String v keystoreSettings.put(setting, value); } cluster.updateStoredSecureSettings(); - assertOK(adminClient().performRequest(new Request("POST", "/_nodes/reload_secure_settings"))); + final var reloadRequest = new Request("POST", "/_nodes/reload_secure_settings"); + reloadRequest.setJsonEntity("{\"secure_settings_password\":\"" + KEYSTORE_PASSWORD + "\"}"); + assertOK(adminClient().performRequest(reloadRequest)); } public void testFailureOnInvalidClientAuthentication() throws Exception { diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/README.asciidoc 
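With a password-protected keystore, the reload API has to be given that password in the request body, which is what the test now sends. A sketch using the low-level REST client; the host, password, and the client's TLS setup are assumptions here:

[source,java]
----
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class ReloadSecureSettingsCall {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "https")).build()) {
            Request reloadRequest = new Request("POST", "/_nodes/reload_secure_settings");
            // the keystore password travels in the JSON body, not in the URL
            reloadRequest.setJsonEntity("{\"secure_settings_password\":\"keystore-password\"}");
            Response response = client.performRequest(reloadRequest);
            System.out.println(response.getStatusLine());
        }
    }
}
----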
b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/README.asciidoc index 37185a996fbba..cb1aadc94e4ac 100644 --- a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/README.asciidoc +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/README.asciidoc @@ -28,6 +28,20 @@ rm http.zip rmdir http ----------------------------------------------------------------------------------------------------------- +[source,shell] +----------------------------------------------------------------------------------------------------------- +elasticsearch-certutil cert --pem --name=transport --out=transport.zip --pass="transport-password" --days=3500 \ + --ca-cert=ca-transport.crt --ca-key=ca-transport.key --ca-pass="ca-password" \ + --dns=localhost --dns=localhost.localdomain --dns=localhost4 --dns=localhost4.localdomain4 --dns=localhost6 --dns=localhost6.localdomain6 \ + --ip=127.0.0.1 --ip=0:0:0:0:0:0:0:1 + +unzip transport.zip +mv transport/transport.* ./ + +rm transport.zip +rmdir transport +----------------------------------------------------------------------------------------------------------- + [source,shell] ----------------------------------------------------------------------------------------------------------- elasticsearch-certutil cert --pem --name=pki --out=${PWD}/pki.zip --pass="pki-password" --days=3500 \ diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.crt b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.crt new file mode 100644 index 0000000000000..320f00ccde59b --- /dev/null +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDSTCCAjGgAwIBAgIUGuBmPtwyEv7WZ1H0Yy5vyEEYVR8wDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMjAwNDA3MTEzMDA1WhcNMjkxMTA2MTEzMDA1WjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALeTNx0a6X+Fhf6IQj4ggN9U +1HGIzJKEHGIpATDgbfdIv88e0O0I6HN7pmLf5LuUPDGc2oLGnxqATgnFek5eJ4QW +sKgflGB4C0EgQH4JAooIG0EI6aj3IcdzBwH8bdymAdsGj0Zcvm6wjhLixgiN3yIM +8KJAtJrSCITI88gfXhXyU0XCSzgruFkdvHjFBCWpCaK3hnjoiO65186PcGbrZHB8 +Izs2soa6H1AHVDMhmJjlwJWYtibjok+sgrjkDWG7cBh6Al7yXGUBOs9SgMXUpI3Y +0r/dDdeISdI5VzwKZpX6qYcNJI+jtgZUD0alMKBxjq3+v8GlDE/QVNyDwp/7SA8C +AwEAAaNTMFEwHQYDVR0OBBYEFMdSVLWtAhqfDXRQj+5o80nK1XaQMB8GA1UdIwQY +MBaAFMdSVLWtAhqfDXRQj+5o80nK1XaQMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI +hvcNAQELBQADggEBAEiaX+JtgDb6PuPqO9NtxuPEsg6Ghj0dVYLOYE9cmKS9xeyy +sIDJbPl8vy7gomVBDYe+3qM2UElKFdApKGjOctjNlH8npkQxg72sarBUzOfuFPx5 +Z6u5TuKMfqjx2dwpTr3Yp/iUFs9e9zcDMnQYt5B6SCjKpKKodKKh+CE6wOg4nbSF +43OYO9HFHfwIlEqvof7j0r5gnPf5fYSYybYFAqy4oAfpESPq9lJuEvA46TrGpmP6 +IpMYkJJ6O+98A7RHo5kstZJdnG5paAKobdPEYxbIZvRyMJ8IxW8kSAaTKsK7W34k +IYciDd/YY3R+nhnh8F5DjVcyc79Zkv9Cjig/OxQ= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.key b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.key new file mode 100644 index 0000000000000..6bbdd44d274df --- /dev/null +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/ca-transport.key @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,8209E02F62E3909502FECF5E5E9CF7A7 + +EdOFZ6/z/e4elfeAKs2B++/Px/IpiZdmiseZPjfwa6jgpY+8sehmze5+34vrxYJT 
+cMBH3QafmhdQZ4/Eo7DVFONrjJ3OmD5//ZiTIujTPwMsgGAdeq0yMC0cDkzg8SQ7 +KvTh0PY0feC6bVsY+YjDprDfpqIWf89F8ikgat9cmucV9YO3RbYnxgxRIztbHLP3 +GenAtdG+v7DzdefAdRQktBSNldkadsY6d/kVBknOHcA4pB/UtDpz77ZF40CNB95z +1Tr37nNnuRBUNHbKklXuozkvYLah66tFxA5v7Rf6F37d2QGBkgDphg/QMbJrrB+q +MsfiXeXqRaCzBN/ZuzTQAdQ/67XpQ+Ax89UOiT6SkKBKN1uNDk7Juzv5zHrq7aWS +aj1qtHDG2vMB+UM5A1MngD1LtXzs21Q0+9a2UT83x+VIP0hVq2uKmO8wAQ9gbBe9 +wkBPca4gLYlbIMWzaAe4DV0rcmux+i7Ezk8oVYW1JcoGjoZ3f9KewIQynBUlXXuO +EzSl4R3yiF/birrK9Lo6c9hOcQKCW2qAX73BKq8PjKgWT3rnqzg97q9PPK/vaien +fwSrTXDgEoy1RCwsPsxjyRf0LGFYLUFRVqrbFPhhjg4aEiuzawcpvRxjorC5UX0L +dpImNssdALDd0BbiqAbChUryFSSxFhQ2yo6hfUXZevD236b09V0jUpnZeyQjeTTk +fhhAUUpnd1YzWuYneD2JZQKvGdgWgYRyEKParFeHLjp95rXNWPSOgoAM+w0fFEjq +zkYQMaDGSnUWbc6LVv2exyRIRTrLAWamKnne7z8VxzetqXXmuX0WJb2lFiYMUw4/ +wf31RA8ZsVSgb9werSyPD9aRe/+YZM+kM3/3MC4jJGc6OJuDqEOhhB06L2Df2AWU +UQwZ7y+2yUC1kcFzc8+oT1TNgBHixouY+oqWkhbdCkbUFUe4FwXNXrMyrY9gZs1/ +PEkhVxxYgpLwifkbfQRJPeJvXxh7NxeolXyISaVENdLkMMYUhdsKTa+GOQbO2yfa +4BhOwAqJvyDFfsRxLiDlbxjzvY5qnMl0e/q8wZ60onHJOFCTCfm2BNx7sW+Sk5Kx +zm0Rxsz4rIIxA5S6zbbdsHxjTC9XiUelKaq+W0XTg76USYneORQNN/Mk9sCXvTud +HUqmSf1wREA1PdEcoJ3tMoAOZWGY43/IrdoG3bTNT96AdToD+D+Or8M2VcOZorVf +c3IRNfxGv2/SwhxW/z4tSLSToSJlt4QKxU9Xzm4UundDy1cHmS1faN6+bBnI5+/F +OKwzPCCUJ6H02CAjx2P/P6YEjoLl8B+7h4whlOfT/+IQbzOcGMpPyGu4jSf1KffA +asAQeBvYTx0QPdv2E7e216RLOlp/ERMzkUvF1G7UYKF7Ao6cUpSH6nvGABPLKNXV +fqjpWq8O4R1UEUXi6dqF1HfAHllI+vMw7LzRJK/5zVrWlJPm4c/Rng5OkK7aAGee +J0eTSlCdNpyaZzjyk2ZAQ54kZVqAS90zS1zo6lg2v9yfAfz6eYlfl2OGfFVG40Jt +oYxEVcG9LeD3XOkPOnTblHdKMor8cQt+TEJPu9eM31ay1QSilixx2yfOOFTgJZOi +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.crt b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.crt new file mode 100644 index 0000000000000..db93ca6ac4750 --- /dev/null +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDuDCCAqCgAwIBAgIVAIdpYPATbRn96E+eVTG/s0byNh/FMA0GCSqGSIb3DQEB +CwUAMDQxMjAwBgNVBAMTKUVsYXN0aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2Vu +ZXJhdGVkIENBMB4XDTIwMDQwNzExMzA1OFoXDTI5MTEwNjExMzA1OFowFDESMBAG +A1UEAxMJdHJhbnNwb3J0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +pvfY9x3F8xR2+noX0PWxsc6XrKCxTQcbSHkkEr2Qr6BqGVXwEj/d6ocqdgcC7IZy +0HEwewBbO69Pg8INQ/21stcwZzW+4ZnKYsBmaZx1yCxYcplndFm9t5Fj/jTBsoJ3 +PemvJwMrewuVSwsE4WHVkXz4KVETfUd8DZiWoVnlRgaXfvvudib1DNxtuGEra+Zh +d3JcC1Wsn51qjpHsj/6s/usT6hmlm4Bu5tjAMxXFVX6J0skfRSVhLmNWgr86WBKB +9/qTJU34FBQGh2Ok/knkiO5rae+UCPpEpCNCCV3rFcMdxP613WfemRRqhUL0V6g3 +n4ExJa0853SsfvPEyHOADQIDAQABo4HgMIHdMB0GA1UdDgQWBBSraIvkNPX2TQQg +h8Ee3mWCALYr/zAfBgNVHSMEGDAWgBTHUlS1rQIanw10UI/uaPNJytV2kDCBjwYD +VR0RBIGHMIGEgglsb2NhbGhvc3SCF2xvY2FsaG9zdDYubG9jYWxkb21haW42hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABggpsb2NhbGhvc3Q0ggpsb2NhbGhvc3Q2ghVs +b2NhbGhvc3QubG9jYWxkb21haW6CF2xvY2FsaG9zdDQubG9jYWxkb21haW40MAkG +A1UdEwQCMAAwDQYJKoZIhvcNAQELBQADggEBABRKISeKdMRRQAvZBan2ONUFUD3P +7Aq8JP+GttC9Y+1uI0CTIZsnih+tuZ9l2Gq9nyfBVMcDSutlnBoQZLg5lCYQPaK7 +SuFhPRHGauEGYw1TjhrMEPokxzoT/X0/Sz5Ter6/qWzPKQs9EuaVJ27XfZd+kySn +S+cXd95woi+S8JQzQbcpA0rXCnWJ3l2frwG/3Hg8f82x2c6vgOzTG0Hklp0sFkUt +UqaBHGXPLiitaB01jUX60HZbxt5HIEseLctUmQlDtAEWwA3X6cRUEjulwRx8s52T +1FT2ORbVJ7ybKARGBSs932Fv2rWGmg8pOBA4ulJTJNvT0T+ob/H/i40Qd04= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.key b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.key new file mode 100644 index 
0000000000000..601c665a48ce6 --- /dev/null +++ b/x-pack/plugin/security/qa/jwt-realm/src/javaRestTest/resources/ssl/transport.key @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-128-CBC,DAC0DDB93011ABD08161118074F353A7 + +hPjzr8y4t3omv6jItFSxF/UeirrdlMhFoxxsw+E5fl4hRjD2J6LuUpOl0XBuvrCO +2NN9Simlkfo57l2O8tZ3xwKU037x9qP2O3wo0FZ4OuRcLbXZtp5kIV30/wdo0kbp +GV+18PtGfReo75rszs/VAm9Hg1URqVw0La2r7DomYQB9FJY8N8mwSdSvF194kjGO +pBxiuzzECUwXEGuMRzmc1Cddbw7NsIdg43FRd1uoC4dqj9yBonYEYe5P8WgopL4N +obTi6PzH+kqDSCaJo7Fdr9CYo37f2YsSbtHmuEZP58J/aSB9nl5wdAmas3/dohrI +5GSM9zp+UocFuV6Uf+X9TTJMt97BlRgFdPODh88pTKGLVQyKeBPQbVjgwl9mttxO +i+c/dej/jHt0gwlt8cvZw0Ss50YdNnWtck91yYpXE7iz59CTY+QI24DEvsaP4bkR +QYdIhJHOYamGW0ttCSU8bw1h9RubIvSa+BoiuB+1TaCYU+azuaAYnFlyuR31z4rD +yniPMnb0+5uOkU/srwb4MxVVw/0iYkKAGTEwdLPKhyheuDU9ixkNOQ/k12zV0R7d +gzMFQOlrB4v8Y4LrsNPnAz/uCTvKgBrOS8p076qeGkSX+JIZVNHYyzLnSy7p6hjO +eD3tDx/SA1HaiLzD1VqujnYb6wshYjQGkSPSY3COq8dQgpCqMAlkOycUQO1SbuNt +HZFv9X0w2z5HjPJXtKLLXMLeluNNRQD+IVhvbZjIM1cAUQNqL3OQPGa7W5RYoFYK +rDffzQAzukD5dt6jH+uu3cwnEeJiW8hxZ0+DHJR1X5EJWpN544yTl8jgSPT8MPAU +kxq7OyE0F/JY+UWP1hPILimDrf3Ov8KRtTDGsSvV3IcX+92QKMcvnK21QBZqZjSs +zcmjp2jN1MLwieJyZ3un0MUT9kOyG/5vGoAJe9O/KDtv6rrhKQN5JHi5yKw0Uwi9 +CwrwwkxbRLSBbWugZGXyBHkR/RGIuEEysLKRFej2q4WBZrPOzZlgyvgBbd6/4Eg5 +twngo6JTmYALwVJNW/drok1H0JelanZ6jM/JjNRFWMZnS5o+4cwQURB5O7TIKHdV +7zEkaw82Ng0Nq8dPrU8N9G3LTmIC1E4t++L+D18C2lV0ZDd0Svh3NIA65FXSRvX9 +2g9GQGfGGbgydmjv9j5lx6VdhuTgYKRL4b5bS7VnH+F9m864g/MtSQpXQPR5B54g +YHFGiKCAzruZt1MmJ5m8Jvpg84i2lIZkGImwAstV7xVkmQoC3i77awmcQP6s7rJd +Lo7RKEysVPDbzdnZnWGK0PWJwtgsqrAvVcK7ghygi+vSQkDF0L7qunchOKa00oZR +LZa08b5BWuXeqw4lXZDQDT7hk3NUyW3H7Z1uxUlt1kvcGb6zrInW6Bin0hqsODvj +0drMOZp/5NTDSwcEzkW+LgjfKZw8Szmhlt3v+luNFr3KzbnFtEvewD1OVikNGzm9 +sfZ899zNkWfvNJaXL3bvzbTn9d8T15YKCwO9RqPpYKDqXBaC4+OjbNsy4AW/JHPr +H/i3D3rhMXR/CALhp4+Knq4o3vMA+3TsUeZ3lOTogobVloWfixIIiRXfaqT4LmEC +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java index 9f93392ad13d7..6ffa09dc1f265 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityDlsAndFlsRestIT.java @@ -14,6 +14,7 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.Tuple; import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.rest.ObjectPath; import java.io.IOException; @@ -172,12 +173,17 @@ protected void assertSearchResponseContainsExpectedIndicesAndFields( ) { try { assertOK(searchResponse); - final var searchResult = Arrays.stream(SearchResponse.fromXContent(responseAsParser(searchResponse)).getHits().getHits()) - .collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap)); + var response = SearchResponseUtils.responseAsSearchResponse(searchResponse); + try { + final var searchResult = Arrays.stream(response.getHits().getHits()) + .collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap)); - assertThat(searchResult.keySet(), containsInAnyOrder(expectedRemoteIndices)); - for (String remoteIndex : 
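A recurring shape in these test changes is acquire, use, release: parse the SearchResponse, run the assertions, and decRef() in a finally block so a failing assertion cannot leak the ref-counted response. Reduced to a dependency-free skeleton; the RefCounted interface below is a stand-in, not the Elasticsearch one:

[source,java]
----
public class RefCountedUsage {
    interface RefCounted {
        void decRef();
    }

    // stand-in for SearchResponseUtils.responseAsSearchResponse(...) or SearchResponse.fromXContent(parser)
    static RefCounted parseResponse() {
        return () -> System.out.println("response released");
    }

    public static void main(String[] args) {
        RefCounted response = parseResponse();
        try {
            System.out.println("run assertions against the response");
        } finally {
            response.decRef(); // always release, even when an assertion throws
        }
    }
}
----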
expectedRemoteIndices) { - assertThat(searchResult.get(remoteIndex).keySet(), containsInAnyOrder(expectedFields)); + assertThat(searchResult.keySet(), containsInAnyOrder(expectedRemoteIndices)); + for (String remoteIndex : expectedRemoteIndices) { + assertThat(searchResult.get(remoteIndex).keySet(), containsInAnyOrder(expectedFields)); + } + } finally { + response.decRef(); } } catch (IOException e) { throw new UncheckedIOException(e); @@ -195,13 +201,18 @@ protected void assertSearchResponseContainsExpectedIndicesAndFields( ) { try { assertOK(searchResponse); - final var searchResult = Arrays.stream(SearchResponse.fromXContent(responseAsParser(searchResponse)).getHits().getHits()) - .collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap)); + var response = SearchResponseUtils.responseAsSearchResponse(searchResponse); + try { + final var searchResult = Arrays.stream(response.getHits().getHits()) + .collect(Collectors.toMap(SearchHit::getIndex, SearchHit::getSourceAsMap)); - assertThat(searchResult.keySet(), equalTo(expectedRemoteIndicesAndFields.keySet())); - for (String remoteIndex : expectedRemoteIndicesAndFields.keySet()) { - Set expectedFields = expectedRemoteIndicesAndFields.get(remoteIndex); - assertThat(searchResult.get(remoteIndex).keySet(), equalTo(expectedFields)); + assertThat(searchResult.keySet(), equalTo(expectedRemoteIndicesAndFields.keySet())); + for (String remoteIndex : expectedRemoteIndicesAndFields.keySet()) { + Set expectedFields = expectedRemoteIndicesAndFields.get(remoteIndex); + assertThat(searchResult.get(remoteIndex).keySet(), equalTo(expectedFields)); + } + } finally { + response.decRef(); } } catch (IOException e) { throw new UncheckedIOException(e); @@ -211,7 +222,7 @@ protected void assertSearchResponseContainsExpectedIndicesAndFields( protected void assertSearchResponseContainsEmptyResult(Response response) { try { assertOK(response); - SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + SearchResponse searchResponse = SearchResponseUtils.responseAsSearchResponse(response); try { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(0L)); } finally { diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityWithMultipleRemotesRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityWithMultipleRemotesRestIT.java index 536176ed4c833..aa65edae88506 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityWithMultipleRemotesRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/AbstractRemoteClusterSecurityWithMultipleRemotesRestIT.java @@ -214,7 +214,10 @@ private static void searchAndExpect403(String searchPath) { static void searchAndAssertIndicesFound(String searchPath, String... 
expectedIndices) throws IOException { final Response response = performRequestWithRemoteSearchUser(new Request("GET", searchPath)); assertOK(response); - final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + final SearchResponse searchResponse; + try (var parser = responseAsParser(response)) { + searchResponse = SearchResponse.fromXContent(parser); + } try { final List actualIndices = Arrays.stream(searchResponse.getHits().getHits()) .map(SearchHit::getIndex) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java index 4227354561178..d103e3c50ef7e 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityApiKeyRestIT.java @@ -183,7 +183,10 @@ public void testCrossClusterSearchWithApiKey() throws Exception { ); final Response response = performRequestWithApiKey(searchRequest, apiKeyEncoded); assertOK(response); - final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + final SearchResponse searchResponse; + try (var parser = responseAsParser(response)) { + searchResponse = SearchResponse.fromXContent(parser); + } try { final List actualIndices = Arrays.stream(searchResponse.getHits().getHits()) .map(SearchHit::getIndex) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java index 8c01398dd2969..5c4b61537e9a5 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityBwcRestIT.java @@ -189,7 +189,10 @@ public void testBwcWithLegacyCrossClusterSearch() throws Exception { ? performRequestWithRemoteAccessUser(searchRequest) : performRequestWithApiKey(searchRequest, apiKeyEncoded); assertOK(response); - final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + final SearchResponse searchResponse; + try (var parser = responseAsParser(response)) { + searchResponse = SearchResponse.fromXContent(parser); + } try { final List actualIndices = Arrays.stream(searchResponse.getHits().getHits()) .map(SearchHit::getIndex) diff --git a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java index 03489f6365dd1..d4321f63017ad 100644 --- a/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java +++ b/x-pack/plugin/security/qa/multi-cluster/src/javaRestTest/java/org/elasticsearch/xpack/remotecluster/RemoteClusterSecurityCcrIT.java @@ -276,7 +276,10 @@ private void verifyReplicatedDocuments(long numberOfDocs, String... 
indices) thr throw new AssertionError(e); } assertOK(response); - final SearchResponse searchResponse = SearchResponse.fromXContent(responseAsParser(response)); + final SearchResponse searchResponse; + try (var parser = responseAsParser(response)) { + searchResponse = SearchResponse.fromXContent(parser); + } try { assertThat(searchResponse.getHits().getTotalHits().value, equalTo(numberOfDocs)); assertThat( diff --git a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java index 6e78eb2fb5b83..cab0c2bff28f0 100644 --- a/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java +++ b/x-pack/plugin/security/qa/operator-privileges-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/operator/Constants.java @@ -255,6 +255,7 @@ public class Constants { "cluster:admin/xpack/security/profile/suggest", "cluster:admin/xpack/security/profile/set_enabled", "cluster:admin/xpack/security/realm/cache/clear", + "cluster:admin/xpack/security/remote_cluster_credentials/reload", "cluster:admin/xpack/security/role/delete", "cluster:admin/xpack/security/role/get", "cluster:admin/xpack/security/role/put", @@ -516,6 +517,7 @@ public class Constants { "indices:data/read/eql", "indices:data/read/eql/async/get", "indices:data/read/esql", + "indices:data/read/esql/async/get", "indices:data/read/explain", "indices:data/read/field_caps", "indices:data/read/get", diff --git a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java index 97b52a699749e..51358d82bb238 100644 --- a/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java +++ b/x-pack/plugin/security/qa/security-trial/src/javaRestTest/java/org/elasticsearch/xpack/security/crossclusteraccess/CrossClusterAccessHeadersForCcsRestIT.java @@ -38,7 +38,6 @@ import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.aggregations.InternalAggregations; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.ObjectPath; import org.elasticsearch.test.transport.MockTransportService; @@ -1152,15 +1151,13 @@ private static MockTransportService startTransport( ); channel.sendResponse( new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), - InternalAggregations.EMPTY, - null, - null, - false, - null, - 1 - ), + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN), + InternalAggregations.EMPTY, + null, + false, + null, + null, + 1, null, 1, 1, diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java new file mode 100644 index 0000000000000..05b859a1e94c9 --- 
/dev/null +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ReloadRemoteClusterCredentialsIT.java @@ -0,0 +1,311 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.xpack.security; + +import org.apache.lucene.search.TotalHits; +import org.elasticsearch.TransportVersion; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.admin.cluster.node.reload.NodesReloadSecureSettingsResponse; +import org.elasticsearch.action.admin.cluster.remote.RemoteClusterNodesAction; +import org.elasticsearch.action.admin.cluster.settings.ClusterUpdateSettingsRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; +import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest; +import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.action.search.SearchShardsRequest; +import org.elasticsearch.action.search.SearchShardsResponse; +import org.elasticsearch.action.search.ShardSearchFailure; +import org.elasticsearch.action.search.TransportSearchAction; +import org.elasticsearch.action.search.TransportSearchShardsAction; +import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.node.VersionInformation; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.settings.KeyStoreWrapper; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.transport.TransportAddress; +import org.elasticsearch.common.util.concurrent.ConcurrentCollections; +import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.env.Environment; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.test.SecuritySingleNodeTestCase; +import org.elasticsearch.test.transport.MockTransportService; +import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.RemoteClusterCredentialsManager; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.security.authc.ApiKeyService; +import org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders; +import org.junit.BeforeClass; + +import java.util.Collections; +import java.util.List; +import java.util.Map; +import java.util.concurrent.BlockingQueue; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.ExecutionException; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; + +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; +import static org.hamcrest.Matchers.notNullValue; +import static org.hamcrest.Matchers.nullValue; + +public class ReloadRemoteClusterCredentialsIT extends SecuritySingleNodeTestCase { + private static final String CLUSTER_ALIAS = 
"my_remote_cluster"; + + @BeforeClass + public static void disableInFips() { + assumeFalse( + "Cannot run in FIPS mode since the keystore will be password protected and sending a password in the reload" + + "settings api call, require TLS to be configured for the transport layer", + inFipsJvm() + ); + } + + @Override + public String configRoles() { + return org.elasticsearch.core.Strings.format(""" + user: + cluster: [ "ALL" ] + indices: + - names: '*' + privileges: [ "ALL" ] + remote_indices: + - names: '*' + privileges: [ "ALL" ] + clusters: ["*"] + """); + } + + @Override + public void tearDown() throws Exception { + try { + clearRemoteCluster(); + super.tearDown(); + } finally { + ThreadPool.terminate(threadPool, 10, TimeUnit.SECONDS); + } + } + + private final ThreadPool threadPool = new TestThreadPool(getClass().getName()); + + public void testReloadRemoteClusterCredentials() throws Exception { + final String credentials = randomAlphaOfLength(42); + writeCredentialsToKeyStore(credentials); + final RemoteClusterCredentialsManager clusterCredentialsManager = getInstanceFromNode(TransportService.class) + .getRemoteClusterService() + .getRemoteClusterCredentialsManager(); + // Until we reload, credentials written to keystore are not loaded into the credentials manager + assertThat(clusterCredentialsManager.hasCredentials(CLUSTER_ALIAS), is(false)); + reloadSecureSettings(); + assertThat(clusterCredentialsManager.resolveCredentials(CLUSTER_ALIAS), equalTo(credentials)); + + // Check that credentials get used for a remote connection, once we configure it + final BlockingQueue> capturedHeaders = ConcurrentCollections.newBlockingQueue(); + try (MockTransportService remoteTransport = startTransport("remoteNodeA", threadPool, capturedHeaders)) { + final TransportAddress remoteAddress = remoteTransport.getOriginalTransport() + .profileBoundAddresses() + .get("_remote_cluster") + .publishAddress(); + + configureRemoteCluster(remoteAddress); + + // Run search to trigger header capturing on the receiving side + client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); + + assertHeadersContainCredentialsThenClear(credentials, capturedHeaders); + + // Update credentials and ensure they are used + final String updatedCredentials = randomAlphaOfLength(41); + writeCredentialsToKeyStore(updatedCredentials); + reloadSecureSettings(); + + client().search(new SearchRequest(CLUSTER_ALIAS + ":index-a")).get().decRef(); + + assertHeadersContainCredentialsThenClear(updatedCredentials, capturedHeaders); + } + } + + private void assertHeadersContainCredentialsThenClear(String credentials, BlockingQueue> capturedHeaders) { + assertThat(capturedHeaders, is(not(empty()))); + for (Map actualHeaders : capturedHeaders) { + assertThat(actualHeaders, hasKey(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY)); + assertThat( + actualHeaders.get(CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), + equalTo(ApiKeyService.withApiKeyPrefix(credentials)) + ); + } + capturedHeaders.clear(); + assertThat(capturedHeaders, is(empty())); + } + + private void clearRemoteCluster() throws InterruptedException, ExecutionException { + final var builder = Settings.builder() + .putNull("cluster.remote." + CLUSTER_ALIAS + ".mode") + .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds") + .putNull("cluster.remote." 
+ CLUSTER_ALIAS + ".proxy_address"); + clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); + } + + @Override + protected Settings nodeSettings() { + return Settings.builder().put(super.nodeSettings()).put("xpack.security.remote_cluster_client.ssl.enabled", false).build(); + } + + private void configureRemoteCluster(TransportAddress remoteAddress) throws InterruptedException, ExecutionException { + final Settings.Builder builder = Settings.builder(); + if (randomBoolean()) { + builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "sniff") + .put("cluster.remote." + CLUSTER_ALIAS + ".seeds", remoteAddress.toString()) + .putNull("cluster.remote." + CLUSTER_ALIAS + ".proxy_address"); + } else { + builder.put("cluster.remote." + CLUSTER_ALIAS + ".mode", "proxy") + .put("cluster.remote." + CLUSTER_ALIAS + ".proxy_address", remoteAddress.toString()) + .putNull("cluster.remote." + CLUSTER_ALIAS + ".seeds"); + } + clusterAdmin().updateSettings(new ClusterUpdateSettingsRequest().persistentSettings(builder)).get(); + } + + private void writeCredentialsToKeyStore(String credentials) throws Exception { + final Environment environment = getInstanceFromNode(Environment.class); + final KeyStoreWrapper keyStoreWrapper = KeyStoreWrapper.create(); + keyStoreWrapper.setString("cluster.remote." + CLUSTER_ALIAS + ".credentials", credentials.toCharArray()); + keyStoreWrapper.save(environment.configFile(), new char[0], false); + } + + public static MockTransportService startTransport( + final String nodeName, + final ThreadPool threadPool, + final BlockingQueue> capturedHeaders + ) { + boolean success = false; + final Settings settings = Settings.builder() + .put("node.name", nodeName) + .put("remote_cluster_server.enabled", "true") + .put("remote_cluster.port", "0") + .put("xpack.security.remote_cluster_server.ssl.enabled", "false") + .build(); + final MockTransportService service = MockTransportService.createNewService( + settings, + VersionInformation.CURRENT, + TransportVersion.current(), + threadPool, + null + ); + try { + service.registerRequestHandler( + ClusterStateAction.NAME, + EsExecutors.DIRECT_EXECUTOR_SERVICE, + ClusterStateRequest::new, + (request, channel, task) -> { + capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); + channel.sendResponse( + new ClusterStateResponse(ClusterName.DEFAULT, ClusterState.builder(ClusterName.DEFAULT).build(), false) + ); + } + ); + service.registerRequestHandler( + RemoteClusterNodesAction.TYPE.name(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + RemoteClusterNodesAction.Request::new, + (request, channel, task) -> { + capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); + channel.sendResponse(new RemoteClusterNodesAction.Response(List.of())); + } + ); + service.registerRequestHandler( + TransportSearchShardsAction.TYPE.name(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + SearchShardsRequest::new, + (request, channel, task) -> { + capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); + channel.sendResponse(new SearchShardsResponse(List.of(), List.of(), Collections.emptyMap())); + } + ); + service.registerRequestHandler( + TransportSearchAction.TYPE.name(), + EsExecutors.DIRECT_EXECUTOR_SERVICE, + SearchRequest::new, + (request, channel, task) -> { + capturedHeaders.add(Map.copyOf(threadPool.getThreadContext().getHeaders())); + channel.sendResponse( + new SearchResponse( + new SearchHits(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 
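The integration test swaps credentials by rewriting the node's elasticsearch.keystore on disk and then reloading secure settings. A compressed sketch of the keystore-writing half, assuming the Elasticsearch server classes are on the classpath; the temp directory and secret value are illustrative:

[source,java]
----
import java.nio.file.Files;
import java.nio.file.Path;

import org.elasticsearch.common.settings.KeyStoreWrapper;

public class WriteRemoteClusterCredentials {
    public static void main(String[] args) throws Exception {
        Path configDir = Files.createTempDirectory("es-config"); // the node's config dir in a real test
        try (KeyStoreWrapper keystore = KeyStoreWrapper.create()) {
            // same setting key shape as the test: cluster.remote.<alias>.credentials
            keystore.setString("cluster.remote.my_remote_cluster.credentials", "s3cret".toCharArray());
            keystore.save(configDir, new char[0], false); // empty keystore password, as in the test
        }
        System.out.println("wrote " + configDir.resolve("elasticsearch.keystore"));
    }
}
----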
Float.NaN), + InternalAggregations.EMPTY, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 100, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) + ); + } + ); + service.start(); + service.acceptIncomingRequests(); + success = true; + return service; + } finally { + if (success == false) { + service.close(); + } + } + } + + private void reloadSecureSettings() throws InterruptedException { + final AtomicReference reloadSettingsError = new AtomicReference<>(); + final CountDownLatch latch = new CountDownLatch(1); + final SecureString emptyPassword = randomBoolean() ? new SecureString(new char[0]) : null; + clusterAdmin().prepareReloadSecureSettings() + .setSecureStorePassword(emptyPassword) + .setNodesIds(Strings.EMPTY_ARRAY) + .execute(new ActionListener<>() { + @Override + public void onResponse(NodesReloadSecureSettingsResponse nodesReloadResponse) { + try { + assertThat(nodesReloadResponse, notNullValue()); + final Map nodesMap = nodesReloadResponse.getNodesMap(); + assertThat(nodesMap.size(), equalTo(1)); + for (final NodesReloadSecureSettingsResponse.NodeResponse nodeResponse : nodesReloadResponse.getNodes()) { + assertThat(nodeResponse.reloadException(), nullValue()); + } + } catch (final AssertionError e) { + reloadSettingsError.set(e); + } finally { + latch.countDown(); + } + } + + @Override + public void onFailure(Exception e) { + reloadSettingsError.set(new AssertionError("Nodes request failed", e)); + latch.countDown(); + } + }); + latch.await(); + if (reloadSettingsError.get() != null) { + throw reloadSettingsError.get(); + } + } +} diff --git a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java index 7fc4c1520f9c6..e481cf70b9afe 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/ScrollHelperIntegTests.java @@ -20,7 +20,6 @@ import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.ScrollHelper; @@ -83,22 +82,28 @@ public void testFetchAllByEntityWithBrokenScroll() { String scrollId = randomAlphaOfLength(5); SearchHit[] hits = new SearchHit[] { new SearchHit(1), new SearchHit(2) }; - InternalSearchResponse internalResponse = new InternalSearchResponse( - new SearchHits(hits, new TotalHits(3, TotalHits.Relation.EQUAL_TO), 1), - null, - null, - null, - false, - false, - 1 - ); Answer returnResponse = invocation -> { @SuppressWarnings("unchecked") ActionListener listener = (ActionListener) invocation.getArguments()[1]; ActionListener.respondAndRelease( listener, - new SearchResponse(internalResponse, scrollId, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY) + new SearchResponse( + new SearchHits(hits, new TotalHits(3, TotalHits.Relation.EQUAL_TO), 1), + null, + null, + false, + false, + null, + 1, + scrollId, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) ); return null; }; diff --git 
a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java index 19d29ef251dd1..faa85150dca31 100644 --- a/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java +++ b/x-pack/plugin/security/src/internalClusterTest/java/org/elasticsearch/xpack/security/authc/TokenAuthIntegTests.java @@ -44,8 +44,10 @@ import java.time.temporal.ChronoUnit; import java.util.ArrayList; import java.util.Collections; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; @@ -635,11 +637,11 @@ public void testRefreshingMultipleTimesWithinWindowSucceeds() throws Exception { } } logger.info("received access token [{}] and refresh token [{}]", result.accessToken(), result.getRefreshToken()); - completedLatch.countDown(); } catch (IOException e) { failed.set(true); - completedLatch.countDown(); logger.error("caught exception", e); + } finally { + completedLatch.countDown(); } })); } @@ -655,7 +657,9 @@ public void testRefreshingMultipleTimesWithinWindowSucceeds() throws Exception { assertThat(failed.get(), equalTo(false)); // Assert that we only ever got one token/refresh_token pair synchronized (tokens) { - assertThat((int) tokens.stream().distinct().count(), equalTo(1)); + Set<String> uniqueTokens = new HashSet<>(tokens); + logger.info("Unique tokens received from refreshToken call [{}]", uniqueTokens); + assertThat(uniqueTokens.size(), equalTo(1)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 51a902d7e12c0..1d849055c70a5 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -13,6 +13,7 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.lucene.util.SetOnce; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.TransportVersion; @@ -21,6 +22,7 @@ import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilter; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.bootstrap.BootstrapCheck; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterState; @@ -110,6 +112,7 @@ import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.SecuritySettings; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.action.ClearSecurityCacheAction; import org.elasticsearch.xpack.core.security.action.DelegatePkiAuthenticationAction; import org.elasticsearch.xpack.core.security.action.apikey.BulkUpdateApiKeyAction; @@ -244,6 +247,7 @@ import org.elasticsearch.xpack.security.action.service.TransportGetServiceAccountCredentialsAction; import 
org.elasticsearch.xpack.security.action.service.TransportGetServiceAccountNodesCredentialsAction; import org.elasticsearch.xpack.security.action.settings.TransportGetSecuritySettingsAction; +import org.elasticsearch.xpack.security.action.settings.TransportReloadRemoteClusterCredentialsAction; import org.elasticsearch.xpack.security.action.settings.TransportUpdateSecuritySettingsAction; import org.elasticsearch.xpack.security.action.token.TransportCreateTokenAction; import org.elasticsearch.xpack.security.action.token.TransportInvalidateTokenAction; @@ -364,7 +368,6 @@ import org.elasticsearch.xpack.security.support.CacheInvalidatorRegistry; import org.elasticsearch.xpack.security.support.ExtensionComponents; import org.elasticsearch.xpack.security.support.SecuritySystemIndices; -import org.elasticsearch.xpack.security.transport.RemoteClusterCredentialsResolver; import org.elasticsearch.xpack.security.transport.SecurityHttpSettings; import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor; import org.elasticsearch.xpack.security.transport.filter.IPFilter; @@ -372,6 +375,7 @@ import java.io.IOException; import java.net.InetSocketAddress; +import java.security.Provider; import java.time.Clock; import java.util.ArrayList; import java.util.Arrays; @@ -386,6 +390,7 @@ import java.util.Objects; import java.util.Set; import java.util.concurrent.Executor; +import java.util.concurrent.TimeUnit; import java.util.function.BiConsumer; import java.util.function.Function; import java.util.function.Predicate; @@ -554,6 +559,7 @@ public class Security extends Plugin private final SetOnce<ReservedRoleMappingAction> reservedRoleMappingAction = new SetOnce<>(); private final SetOnce<WorkflowService> workflowService = new SetOnce<>(); private final SetOnce<Realms> realms = new SetOnce<>(); + private final SetOnce<Client> client = new SetOnce<>(); public Security(Settings settings) { this(settings, Collections.emptyList()); @@ -573,25 +579,30 @@ public Security(Settings settings) { runStartupChecks(settings); Automatons.updateConfiguration(settings); } else { - final List<String> remoteClusterCredentialsSettingKeys = RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS.getAllConcreteSettings( - settings - ).map(Setting::getKey).sorted().toList(); - if (false == remoteClusterCredentialsSettingKeys.isEmpty()) { - throw new IllegalArgumentException( - format( - "Found [%s] remote clusters with credentials [%s]. Security [%s] must be enabled to connect to them. " - + "Please either enable security or remove these settings from the keystore.", - remoteClusterCredentialsSettingKeys.size(), - Strings.collectionToCommaDelimitedString(remoteClusterCredentialsSettingKeys), - XPackSettings.SECURITY_ENABLED.getKey() - ) - ); - } + ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings); this.bootstrapChecks.set(Collections.emptyList()); } this.securityExtensions.addAll(extensions); } + private void ensureNoRemoteClusterCredentialsOnDisabledSecurity(Settings settings) { + assert false == enabled; + final List<String> remoteClusterCredentialsSettingKeys = RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS.getAllConcreteSettings( + settings + ).map(Setting::getKey).sorted().toList(); + if (false == remoteClusterCredentialsSettingKeys.isEmpty()) { + throw new IllegalArgumentException( + format( + "Found [%s] remote clusters with credentials [%s]. Security [%s] must be enabled to connect to them. 
" + + "Please either enable security or remove these settings from the keystore.", + remoteClusterCredentialsSettingKeys.size(), + Strings.collectionToCommaDelimitedString(remoteClusterCredentialsSettingKeys), + XPackSettings.SECURITY_ENABLED.getKey() + ) + ); + } + } + private static void runStartupChecks(Settings settings) { validateRealmSettings(settings); if (XPackSettings.FIPS_MODE_ENABLED.get(settings)) { @@ -616,6 +627,14 @@ protected XPackLicenseState getLicenseState() { return XPackPlugin.getSharedLicenseState(); } + protected Client getClient() { + return client.get(); + } + + protected Realms getRealms() { + return realms.get(); + } + @Override public Collection<?> createComponents(PluginServices services) { try { @@ -654,6 +673,8 @@ Collection<Object> createComponents( return Collections.singletonList(new SecurityUsageServices(null, null, null, null, null, null)); } + this.client.set(client); + // The settings in `environment` may have additional values over what was provided during construction // See Plugin#additionalSettings() this.settings = environment.settings(); @@ -980,8 +1001,6 @@ Collection<Object> createComponents( ipFilter.set(new IPFilter(settings, auditTrailService, clusterService.getClusterSettings(), getLicenseState())); components.add(ipFilter.get()); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = new RemoteClusterCredentialsResolver(settings); - DestructiveOperations destructiveOperations = new DestructiveOperations(settings, clusterService.getClusterSettings()); crossClusterAccessAuthcService.set(new CrossClusterAccessAuthenticationService(clusterService, apiKeyService, authcService.get())); components.add(crossClusterAccessAuthcService.get()); @@ -995,7 +1014,6 @@ Collection<Object> createComponents( securityContext.get(), destructiveOperations, crossClusterAccessAuthcService.get(), - remoteClusterCredentialsResolver, getLicenseState() ) ); @@ -1161,6 +1179,7 @@ public static List<Setting<?>> getSettings(List<SecurityExtension> securityExten // The following just apply in node mode settingsList.add(XPackSettings.FIPS_MODE_ENABLED); + settingsList.add(XPackSettings.FIPS_REQUIRED_PROVIDERS); SSLService.registerSettings(settingsList); // IP Filter settings @@ -1348,6 +1367,7 @@ public void onIndexModule(IndexModule module) { new ActionHandler<>(SetProfileEnabledAction.INSTANCE, TransportSetProfileEnabledAction.class), new ActionHandler<>(GetSecuritySettingsAction.INSTANCE, TransportGetSecuritySettingsAction.class), new ActionHandler<>(UpdateSecuritySettingsAction.INSTANCE, TransportUpdateSecuritySettingsAction.class), + new ActionHandler<>(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, TransportReloadRemoteClusterCredentialsAction.class), usageAction, infoAction ).filter(Objects::nonNull).toList(); @@ -1543,6 +1563,30 @@ static void validateForFips(Settings settings) { } }); + Set<String> foundProviders = new HashSet<>(); + for (Provider provider : java.security.Security.getProviders()) { + foundProviders.add(provider.getName().toLowerCase(Locale.ROOT)); + if (logger.isTraceEnabled()) { + logger.trace("Security Provider: " + provider.getName() + ", Version: " + provider.getVersionStr()); + provider.entrySet().forEach(entry -> { logger.trace("\t" + entry.getKey()); }); + } + } + + final List<String> requiredProviders = XPackSettings.FIPS_REQUIRED_PROVIDERS.get(settings); + logger.info("JVM Security Providers: " + foundProviders); + if (requiredProviders != null && requiredProviders.isEmpty() == false) { + List<String> unsatisfiedProviders = requiredProviders.stream() + .map(s -> s.toLowerCase(Locale.ROOT)) 
+ .filter(element -> foundProviders.contains(element) == false) + .toList(); + + if (unsatisfiedProviders.isEmpty() == false) { + String errorMessage = "Could not find required FIPS security provider: " + unsatisfiedProviders; + logger.error(errorMessage); + validationErrors.add(errorMessage); + } + } + if (validationErrors.isEmpty() == false) { final StringBuilder sb = new StringBuilder(); sb.append("Validation for FIPS 140 mode failed: \n"); @@ -1887,16 +1931,56 @@ public BiConsumer<DiscoveryNode, ClusterState> getJoinValidator() { @Override public void reload(Settings settings) throws Exception { if (enabled) { - realms.get().stream().filter(r -> JwtRealmSettings.TYPE.equals(r.realmRef().getType())).forEach(realm -> { - if (realm instanceof JwtRealm jwtRealm) { - jwtRealm.rotateClientSecret( - CLIENT_AUTHENTICATION_SHARED_SECRET.getConcreteSettingForNamespace(realm.realmRef().getName()).get(settings) - ); - } - }); + final List<Exception> reloadExceptions = new ArrayList<>(); + try { + reloadRemoteClusterCredentials(settings); + } catch (Exception ex) { + reloadExceptions.add(ex); + } + + try { + reloadSharedSecretsForJwtRealms(settings); + } catch (Exception ex) { + reloadExceptions.add(ex); + } + + if (false == reloadExceptions.isEmpty()) { + final var combinedException = new ElasticsearchException( + "secure settings reload failed for one or more security components" + ); + reloadExceptions.forEach(combinedException::addSuppressed); + throw combinedException; + } + } else { + ensureNoRemoteClusterCredentialsOnDisabledSecurity(settings); } } + private void reloadSharedSecretsForJwtRealms(Settings settingsWithKeystore) { + getRealms().stream().filter(r -> JwtRealmSettings.TYPE.equals(r.realmRef().getType())).forEach(realm -> { + if (realm instanceof JwtRealm jwtRealm) { + jwtRealm.rotateClientSecret( + CLIENT_AUTHENTICATION_SHARED_SECRET.getConcreteSettingForNamespace(realm.realmRef().getName()).get(settingsWithKeystore) + ); + } + }); + } + + /** + * This method uses a transport action internally to access classes that are injectable but not part of the plugin contract. + * See {@link TransportReloadRemoteClusterCredentialsAction} for more context. + */ + private void reloadRemoteClusterCredentials(Settings settingsWithKeystore) { + final PlainActionFuture<ActionResponse.Empty> future = new PlainActionFuture<>(); + getClient().execute( + ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION, + new TransportReloadRemoteClusterCredentialsAction.Request(settingsWithKeystore), + future + ); + assert future.isDone() : "expecting local-only action call to return immediately on invocation"; + future.actionGet(0, TimeUnit.NANOSECONDS); + } + static final class ValidateLicenseForFIPS implements BiConsumer<DiscoveryNode, ClusterState> { private final boolean inFipsMode; private final LicenseService licenseService; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java new file mode 100644 index 0000000000000..d6f54e9d3e9e1 --- /dev/null +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/action/settings/TransportReloadRemoteClusterCredentialsAction.java @@ -0,0 +1,77 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.xpack.security.action.settings; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.ActionRequest; +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.ActionFilters; +import org.elasticsearch.action.support.TransportAction; +import org.elasticsearch.common.inject.Inject; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.tasks.Task; +import org.elasticsearch.transport.RemoteClusterService; +import org.elasticsearch.transport.TransportService; +import org.elasticsearch.xpack.core.security.action.ActionTypes; +import org.elasticsearch.xpack.security.Security; + +import java.io.IOException; + +/** + * This is a local-only action which updates remote cluster credentials for remote cluster connections, from keystore settings reloaded via + * a call to {@link org.elasticsearch.rest.action.admin.cluster.RestReloadSecureSettingsAction}. + * + * It's invoked as part of the {@link Security#reload(Settings)} call. + * + * This action is largely an implementation detail to work around the fact that Security is a plugin without direct access to many core + * classes, including the {@link RemoteClusterService} which is required for a credentials reload. A transport action gives us access to + * the {@link RemoteClusterService} which is injectable but not part of the plugin contract. + */ +public class TransportReloadRemoteClusterCredentialsAction extends TransportAction< + TransportReloadRemoteClusterCredentialsAction.Request, + ActionResponse.Empty> { + + private final RemoteClusterService remoteClusterService; + + @Inject + public TransportReloadRemoteClusterCredentialsAction(TransportService transportService, ActionFilters actionFilters) { + super(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION.name(), actionFilters, transportService.getTaskManager()); + this.remoteClusterService = transportService.getRemoteClusterService(); + } + + @Override + protected void doExecute(Task task, Request request, ActionListener<ActionResponse.Empty> listener) { + // We avoid stashing and marking context as system to keep the action as minimal as possible (i.e., avoid copying context) + remoteClusterService.updateRemoteClusterCredentials(request.getSettings()); + listener.onResponse(ActionResponse.Empty.INSTANCE); + } + + public static class Request extends ActionRequest { + private final Settings settings; + + public Request(Settings settings) { + this.settings = settings; + } + + @Override + public ActionRequestValidationException validate() { + return null; + } + + public Settings getSettings() { + return settings; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + localOnly(); + } + } +} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java index 2d700e23f127c..6f82acfcebb44 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ApiKeyService.java @@ -803,7 +803,7 @@ private static boolean isNoop( final Authentication authentication, final BaseUpdateApiKeyRequest request, final Set<RoleDescriptor> userRoleDescriptors - ) { + ) throws IOException { if (apiKeyDoc.version != 
targetDocVersion.id) { return false; } @@ -824,12 +824,11 @@ private static boolean isNoop( return false; } @SuppressWarnings("unchecked") - final var currentRealmDomain = RealmDomain.fromXContent( - XContentHelper.mapToXContentParser( - XContentParserConfiguration.EMPTY, - (Map<String, Object>) currentCreator.get("realm_domain") - ) - ); + var m = (Map<String, Object>) currentCreator.get("realm_domain"); + final RealmDomain currentRealmDomain; + try (var parser = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, m)) { + currentRealmDomain = RealmDomain.fromXContent(parser); + } if (sourceRealm.getDomain().equals(currentRealmDomain) == false) { return false; } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java index aeb101ac0caa4..9c378e0e1156e 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/TokenService.java @@ -210,7 +210,7 @@ public final class TokenService { static final TransportVersion VERSION_ACCESS_TOKENS_AS_UUIDS = TransportVersions.V_7_2_0; static final TransportVersion VERSION_MULTIPLE_CONCURRENT_REFRESHES = TransportVersions.V_7_2_0; static final TransportVersion VERSION_CLIENT_AUTH_FOR_REFRESH = TransportVersions.V_8_2_0; - static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_500_040; + static final TransportVersion VERSION_GET_TOKEN_DOC_FOR_REFRESH = TransportVersions.V_8_500_061; private static final Logger logger = LogManager.getLogger(TokenService.class); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java index 8942be0bee29c..777fe5f71b0a0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/service/ElasticServiceAccounts.java @@ -157,7 +157,7 @@ final class ElasticServiceAccounts { new String[] { "monitor", "manage_own_api_key" }, new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder() - .indices("logs-*", "metrics-*") + .indices("logs-*", "metrics-*", "traces-*") .privileges("write", "create_index", "auto_configure") .build(), }, null, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java index 8a0a9c09b7d1a..e4e9bc453ee83 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/AuthorizationService.java @@ -25,6 +25,7 @@ import org.elasticsearch.action.datastreams.MigrateToDataStreamAction; import org.elasticsearch.action.delete.TransportDeleteAction; import org.elasticsearch.action.index.TransportIndexAction; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.GroupedActionListener; import org.elasticsearch.action.support.replication.TransportReplicationAction.ConcreteShardRequest; import org.elasticsearch.action.update.TransportUpdateAction; @@ -471,6
+472,11 @@ private void authorizeAction( } else if (isIndexAction(action)) { final Metadata metadata = clusterService.state().metadata(); final AsyncSupplier<ResolvedIndices> resolvedIndicesAsyncSupplier = new CachingAsyncSupplier<>(resolvedIndicesListener -> { + if (request instanceof SearchRequest searchRequest && searchRequest.pointInTimeBuilder() != null) { + var resolvedIndices = indicesAndAliasesResolver.resolvePITIndices(searchRequest); + resolvedIndicesListener.onResponse(resolvedIndices); + return; + } final ResolvedIndices resolvedIndices = IndicesAndAliasesResolver.tryResolveWithoutWildcards(action, request); if (resolvedIndices != null) { resolvedIndicesListener.onResponse(resolvedIndices); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java index 16258e71e85b8..a4163b6f10fc0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/IndicesAndAliasesResolver.java @@ -11,6 +11,8 @@ import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; +import org.elasticsearch.action.search.SearchContextId; +import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.cluster.metadata.AliasMetadata; import org.elasticsearch.cluster.metadata.IndexAbstraction; @@ -176,6 +178,24 @@ static ResolvedIndices resolveIndicesAndAliasesWithoutWildcards(String action, I return new ResolvedIndices(localIndices, List.of()); } + /** + * Returns the resolved indices from the {@link SearchContextId} within the provided {@link SearchRequest}. 
+ */ + ResolvedIndices resolvePITIndices(SearchRequest request) { + assert request.pointInTimeBuilder() != null; + var indices = SearchContextId.decodeIndices(request.pointInTimeBuilder().getEncodedId()); + final ResolvedIndices split; + if (request.allowsRemoteIndices()) { + split = remoteClusterResolver.splitLocalAndRemoteIndexNames(indices); + } else { + split = new ResolvedIndices(Arrays.asList(indices), Collections.emptyList()); + } + if (split.isEmpty()) { + return new ResolvedIndices(List.of(NO_INDEX_PLACEHOLDER), Collections.emptyList()); + } + return split; + } + private static void throwOnUnexpectedWildcards(String action, String[] indices) { final List wildcards = Stream.of(indices).filter(Regex::isSimpleMatchPattern).toList(); assert wildcards.isEmpty() == false : "we already know that there's at least one wildcard in the indices"; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java index 03ac7d5e0fa36..70d086cc5a831 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/FileRolesStore.java @@ -280,8 +280,7 @@ static RoleDescriptor parseRoleDescriptor( String roleName = null; XContentParserConfiguration parserConfig = XContentParserConfiguration.EMPTY.withRegistry(xContentRegistry) .withDeprecationHandler(LoggingDeprecationHandler.INSTANCE); - try { - XContentParser parser = YamlXContent.yamlXContent.createParser(parserConfig, segment); + try (XContentParser parser = YamlXContent.yamlXContent.createParser(parserConfig, segment)) { XContentParser.Token token = parser.nextToken(); if (token == XContentParser.Token.START_OBJECT) { token = parser.nextToken(); diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestUpdateProfileDataAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestUpdateProfileDataAction.java index 6e178f30fe1b3..899d68063cf3b 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestUpdateProfileDataAction.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/profile/RestUpdateProfileDataAction.java @@ -63,7 +63,10 @@ protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClien final long ifPrimaryTerm = request.paramAsLong("if_primary_term", -1); final long ifSeqNo = request.paramAsLong("if_seq_no", -1); final RefreshPolicy refreshPolicy = RefreshPolicy.parse(request.param("refresh", "wait_for")); - final Payload payload = PARSER.parse(request.contentParser(), null); + final Payload payload; + try (var parser = request.contentParser()) { + payload = PARSER.parse(parser, null); + } final UpdateProfileDataRequest updateProfileDataRequest = new UpdateProfileDataRequest( uid, diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java index 7ee8ea5d41a63..b2e8719b25c24 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java +++ 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/action/settings/RestUpdateSecuritySettingsAction.java @@ -36,7 +36,10 @@ public List routes() { @Override protected RestChannelConsumer innerPrepareRequest(RestRequest request, NodeClient client) throws IOException { - UpdateSecuritySettingsAction.Request req = UpdateSecuritySettingsAction.Request.parse(request.contentParser()); + UpdateSecuritySettingsAction.Request req; + try (var parser = request.contentParser()) { + req = UpdateSecuritySettingsAction.Request.parse(parser); + } return restChannel -> client.execute(UpdateSecuritySettingsAction.INSTANCE, req, new RestToXContentListener<>(restChannel)); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolver.java deleted file mode 100644 index 93735a700bf92..0000000000000 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolver.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. - */ - -package org.elasticsearch.xpack.security.transport; - -import org.apache.logging.log4j.LogManager; -import org.apache.logging.log4j.Logger; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.settings.SecureString; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.xpack.security.authc.ApiKeyService; - -import java.util.Map; -import java.util.Optional; - -import static org.elasticsearch.transport.RemoteClusterService.REMOTE_CLUSTER_CREDENTIALS; - -public class RemoteClusterCredentialsResolver { - - private static final Logger logger = LogManager.getLogger(RemoteClusterCredentialsResolver.class); - - private final Map clusterCredentials; - - public RemoteClusterCredentialsResolver(final Settings settings) { - this.clusterCredentials = REMOTE_CLUSTER_CREDENTIALS.getAsMap(settings); - logger.debug( - "Read cluster credentials for remote clusters [{}]", - Strings.collectionToCommaDelimitedString(clusterCredentials.keySet()) - ); - } - - public Optional resolve(final String clusterAlias) { - final SecureString apiKey = clusterCredentials.get(clusterAlias); - if (apiKey == null) { - return Optional.empty(); - } else { - return Optional.of(new RemoteClusterCredentials(clusterAlias, ApiKeyService.withApiKeyPrefix(apiKey.toString()))); - } - } - - record RemoteClusterCredentials(String clusterAlias, String credentials) { - @Override - public String toString() { - return "RemoteClusterCredentials{clusterAlias='" + clusterAlias + "', credentials='::es_redacted::'}"; - } - } -} diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java index 53dd31fe46793..162cabf5297ce 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptor.java @@ -12,6 +12,7 @@ import 
org.elasticsearch.action.ActionListener; import org.elasticsearch.action.admin.cluster.state.ClusterStateAction; import org.elasticsearch.action.support.DestructiveOperations; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslConfiguration; import org.elasticsearch.common.util.Maps; @@ -24,6 +25,7 @@ import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteConnectionManager; +import org.elasticsearch.transport.RemoteConnectionManager.RemoteClusterAliasWithCredentials; import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.TransportChannel; @@ -46,6 +48,7 @@ import org.elasticsearch.xpack.core.ssl.SSLService; import org.elasticsearch.xpack.security.Security; import org.elasticsearch.xpack.security.audit.AuditUtil; +import org.elasticsearch.xpack.security.authc.ApiKeyService; import org.elasticsearch.xpack.security.authc.AuthenticationService; import org.elasticsearch.xpack.security.authc.CrossClusterAccessAuthenticationService; import org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders; @@ -63,7 +66,6 @@ import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_PROFILE; import static org.elasticsearch.transport.RemoteClusterPortSettings.REMOTE_CLUSTER_SERVER_ENABLED; import static org.elasticsearch.transport.RemoteClusterPortSettings.TRANSPORT_VERSION_ADVANCED_REMOTE_CLUSTER_SECURITY; -import static org.elasticsearch.xpack.security.transport.RemoteClusterCredentialsResolver.RemoteClusterCredentials; public class SecurityServerTransportInterceptor implements TransportInterceptor { @@ -85,8 +87,7 @@ public class SecurityServerTransportInterceptor implements TransportInterceptor private final Settings settings; private final SecurityContext securityContext; private final CrossClusterAccessAuthenticationService crossClusterAccessAuthcService; - private final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver; - private final Function<Transport.Connection, Optional<String>> remoteClusterAliasResolver; + private final Function<Transport.Connection, Optional<RemoteClusterAliasWithCredentials>> remoteClusterCredentialsResolver; private final XPackLicenseState licenseState; public SecurityServerTransportInterceptor( @@ -98,7 +99,6 @@ public SecurityServerTransportInterceptor( SecurityContext securityContext, DestructiveOperations destructiveOperations, CrossClusterAccessAuthenticationService crossClusterAccessAuthcService, - RemoteClusterCredentialsResolver remoteClusterCredentialsResolver, XPackLicenseState licenseState ) { this( @@ -110,9 +110,8 @@ public SecurityServerTransportInterceptor( securityContext, destructiveOperations, crossClusterAccessAuthcService, - remoteClusterCredentialsResolver, licenseState, - RemoteConnectionManager::resolveRemoteClusterAlias + RemoteConnectionManager::resolveRemoteClusterAliasWithCredentials ); } @@ -125,10 +124,9 @@ public SecurityServerTransportInterceptor( SecurityContext securityContext, DestructiveOperations destructiveOperations, CrossClusterAccessAuthenticationService crossClusterAccessAuthcService, - RemoteClusterCredentialsResolver remoteClusterCredentialsResolver, XPackLicenseState licenseState, // Inject for simplified testing - Function<Transport.Connection, Optional<String>> remoteClusterAliasResolver + Function<Transport.Connection, Optional<RemoteClusterAliasWithCredentials>> remoteClusterCredentialsResolver ) { this.settings = settings; this.threadPool = threadPool; @@ -139,7 +137,6 @@ public SecurityServerTransportInterceptor( 
this.crossClusterAccessAuthcService = crossClusterAccessAuthcService; this.licenseState = licenseState; this.remoteClusterCredentialsResolver = remoteClusterCredentialsResolver; - this.remoteClusterAliasResolver = remoteClusterAliasResolver; this.profileFilters = initializeProfileFilters(destructiveOperations); } @@ -159,7 +156,8 @@ public <T extends TransportResponse> void sendRequest( TransportResponseHandler<T> handler ) { assertNoCrossClusterAccessHeadersInContext(); - final Optional<String> remoteClusterAlias = remoteClusterAliasResolver.apply(connection); + final Optional<String> remoteClusterAlias = remoteClusterCredentialsResolver.apply(connection) + .map(RemoteClusterAliasWithCredentials::clusterAlias); if (PreAuthorizationUtils.shouldRemoveParentAuthorizationFromThreadContext(remoteClusterAlias, action, securityContext)) { securityContext.executeAfterRemovingParentAuthorization(original -> { sendRequestInner( @@ -278,22 +276,23 @@ public <T extends TransportResponse> void sendRequest( * Returns cluster credentials if the connection is remote, and cluster credentials are set up for the target cluster. */ private Optional<RemoteClusterCredentials> getRemoteClusterCredentials(Transport.Connection connection) { - final Optional<String> optionalRemoteClusterAlias = remoteClusterAliasResolver.apply(connection); - if (optionalRemoteClusterAlias.isEmpty()) { + final Optional<RemoteClusterAliasWithCredentials> remoteClusterAliasWithCredentials = remoteClusterCredentialsResolver + .apply(connection); + if (remoteClusterAliasWithCredentials.isEmpty()) { logger.trace("Connection is not remote"); return Optional.empty(); } - final String remoteClusterAlias = optionalRemoteClusterAlias.get(); - final Optional<RemoteClusterCredentials> remoteClusterCredentials = remoteClusterCredentialsResolver.resolve( - remoteClusterAlias - ); - if (remoteClusterCredentials.isEmpty()) { + final String remoteClusterAlias = remoteClusterAliasWithCredentials.get().clusterAlias(); + final SecureString remoteClusterCredentials = remoteClusterAliasWithCredentials.get().credentials(); + if (remoteClusterCredentials == null) { logger.trace("No cluster credentials are configured for remote cluster [{}]", remoteClusterAlias); return Optional.empty(); } - return remoteClusterCredentials; + return Optional.of( + new RemoteClusterCredentials(remoteClusterAlias, ApiKeyService.withApiKeyPrefix(remoteClusterCredentials.toString())) + ); } private void sendWithCrossClusterAccessHeaders( @@ -442,7 +441,7 @@ private void sendWithUser( throw new IllegalStateException("there should always be a user when sending a message for action [" + action + "]"); } - assert securityContext.getParentAuthorization() == null || remoteClusterAliasResolver.apply(connection).isPresent() == false + assert securityContext.getParentAuthorization() == null || remoteClusterCredentialsResolver.apply(connection).isEmpty() : "parent authorization header should not be set for remote cluster requests"; try { @@ -663,4 +662,12 @@ public void onFailure(Exception e) { } } } + + record RemoteClusterCredentials(String clusterAlias, String credentials) { + + @Override + public String toString() { + return "RemoteClusterCredentials{clusterAlias='" + clusterAlias + "', credentials='::es_redacted::'}"; + } + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java index d44e7c27d760e..a2aa04e0f56c3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java +++ 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/LocalStateSecurity.java @@ -16,6 +16,7 @@ import org.elasticsearch.license.LicenseService; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ReloadablePlugin; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.protocol.xpack.XPackUsageRequest; @@ -36,7 +37,7 @@ import java.util.Collections; import java.util.List; -public class LocalStateSecurity extends LocalStateCompositeXPackPlugin { +public class LocalStateSecurity extends LocalStateCompositeXPackPlugin implements ReloadablePlugin { public static class SecurityTransportXPackUsageAction extends TransportXPackUsageAction { @Inject @@ -130,4 +131,15 @@ protected Class> public List plugins() { return plugins; } + + @Override + public void reload(Settings settings) throws Exception { + plugins.stream().filter(p -> p instanceof ReloadablePlugin).forEach(p -> { + try { + ((ReloadablePlugin) p).reload(settings); + } catch (Exception e) { + throw new RuntimeException(e); + } + }); + } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java index 6773da137ac96..18929c70cbe7d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/SecurityTests.java @@ -9,10 +9,13 @@ import org.apache.logging.log4j.Level; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; +import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionModule; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.internal.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; @@ -72,6 +75,7 @@ import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.SecurityExtension; import org.elasticsearch.xpack.core.security.SecurityField; +import org.elasticsearch.xpack.core.security.action.ActionTypes; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.AuthenticationTestHelper; import org.elasticsearch.xpack.core.security.authc.Realm; @@ -116,6 +120,7 @@ import java.util.function.Function; import java.util.function.Predicate; import java.util.stream.Collectors; +import java.util.stream.Stream; import static java.util.Collections.emptyMap; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; @@ -133,7 +138,10 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; +import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; +import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; public class SecurityTests extends ESTestCase { @@ -574,6 +582,32 @@ public void testValidateForFipsInvalidPasswordHashingAlgorithm() { assertThat(iae.getMessage(), 
containsString("Only PBKDF2 is allowed for stored credential hashing in a FIPS 140 JVM.")); } + public void testValidateForFipsRequiredProvider() { + final Settings settings = Settings.builder() + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) + .putList(XPackSettings.FIPS_REQUIRED_PROVIDERS.getKey(), List.of("BCFIPS")) + .build(); + if (inFipsJvm()) { + Security.validateForFips(settings); + // no exceptions since gradle has wired in the bouncy castle FIPS provider + } else { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security provider: [bcfips]")); + } + + final Settings settings2 = Settings.builder() + .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) + .putList(XPackSettings.FIPS_REQUIRED_PROVIDERS.getKey(), List.of("junk0", "BCFIPS", "junk1", "junk2")) + .build(); + if (inFipsJvm()) { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings2)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security provider: [junk0, junk1, junk2]")); + } else { + final IllegalArgumentException iae = expectThrows(IllegalArgumentException.class, () -> Security.validateForFips(settings2)); + assertThat(iae.getMessage(), containsString("Could not find required FIPS security provider: [junk0, bcfips, junk1, junk2]")); + } + } + public void testValidateForFipsMultipleValidationErrors() { final Settings settings = Settings.builder() .put(XPackSettings.FIPS_MODE_ENABLED.getKey(), true) @@ -877,6 +911,23 @@ public void testSecurityMustBeEnableToConnectRemoteClusterWithCredentials() { + "Please either enable security or remove these settings from the keystore." ) ); + + // Security off, remote cluster with credentials on reload call + final MockSecureSettings secureSettings5 = new MockSecureSettings(); + secureSettings5.setString("cluster.remote.my1.credentials", randomAlphaOfLength(20)); + secureSettings5.setString("cluster.remote.my2.credentials", randomAlphaOfLength(20)); + final Settings.Builder builder5 = Settings.builder().setSecureSettings(secureSettings5); + // Use builder with security disabled to construct valid Security instance + final var security = new Security(builder2.build()); + final IllegalArgumentException e5 = expectThrows(IllegalArgumentException.class, () -> security.reload(builder5.build())); + assertThat( + e5.getMessage(), + containsString( + "Found [2] remote clusters with credentials [cluster.remote.my1.credentials,cluster.remote.my2.credentials]. " + + "Security [xpack.security.enabled] must be enabled to connect to them. " + + "Please either enable security or remove these settings from the keystore." 
+ ) + ); } public void testLoadExtensions() throws Exception { @@ -905,6 +956,98 @@ public <T> List<T> loadExtensions(Class<T> extensionPointType) { assertThat(registry, instanceOf(DummyOperatorOnlyRegistry.class)); } + public void testReload() throws Exception { + final Settings settings = Settings.builder().put("xpack.security.enabled", true).put("path.home", createTempDir()).build(); + + final PlainActionFuture<ActionResponse.Empty> value = new PlainActionFuture<>(); + final Client mockedClient = mock(Client.class); + + final Realms mockedRealms = mock(Realms.class); + when(mockedRealms.stream()).thenReturn(Stream.of()); + + doAnswer((inv) -> { + @SuppressWarnings("unchecked") + ActionListener<ActionResponse.Empty> listener = (ActionListener<ActionResponse.Empty>) inv.getArguments()[2]; + listener.onResponse(ActionResponse.Empty.INSTANCE); + return null; + }).when(mockedClient).execute(eq(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION), any(), any()); + + security = new Security(settings, Collections.emptyList()) { + @Override + protected Client getClient() { + return mockedClient; + } + + @Override + protected Realms getRealms() { + return mockedRealms; + } + }; + + final Settings inputSettings = Settings.EMPTY; + security.reload(inputSettings); + + verify(mockedClient).execute(eq(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION), any(), any()); + verify(mockedRealms).stream(); + } + + public void testReloadWithFailures() { + final Settings settings = Settings.builder().put("xpack.security.enabled", true).put("path.home", createTempDir()).build(); + + final boolean failRemoteClusterCredentialsReload = randomBoolean(); + final Client mockedClient = mock(Client.class); + if (failRemoteClusterCredentialsReload) { + doAnswer((inv) -> { + @SuppressWarnings("unchecked") + ActionListener<ActionResponse.Empty> listener = (ActionListener<ActionResponse.Empty>) inv.getArguments()[2]; + listener.onFailure(new RuntimeException("failed remote cluster credentials reload")); + return null; + }).when(mockedClient).execute(eq(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION), any(), any()); + } else { + doAnswer((inv) -> { + @SuppressWarnings("unchecked") + ActionListener<ActionResponse.Empty> listener = (ActionListener<ActionResponse.Empty>) inv.getArguments()[2]; + listener.onResponse(ActionResponse.Empty.INSTANCE); + return null; + }).when(mockedClient).execute(eq(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION), any(), any()); + } + + final Realms mockedRealms = mock(Realms.class); + final boolean failRealmsReload = (false == failRemoteClusterCredentialsReload) || randomBoolean(); + if (failRealmsReload) { + when(mockedRealms.stream()).thenThrow(new RuntimeException("failed jwt realms reload")); + } else { + when(mockedRealms.stream()).thenReturn(Stream.of()); + } + security = new Security(settings, Collections.emptyList()) { + @Override + protected Client getClient() { + return mockedClient; + } + + @Override + protected Realms getRealms() { + return mockedRealms; + } + }; + + final Settings inputSettings = Settings.EMPTY; + final var exception = expectThrows(ElasticsearchException.class, () -> security.reload(inputSettings)); + + assertThat(exception.getMessage(), containsString("secure settings reload failed for one or more security components")); + if (failRemoteClusterCredentialsReload) { + assertThat(exception.getSuppressed()[0].getMessage(), containsString("failed remote cluster credentials reload")); + if (failRealmsReload) { + assertThat(exception.getSuppressed()[1].getMessage(), containsString("failed jwt realms reload")); + } + } else { + assertThat(exception.getSuppressed()[0].getMessage(), containsString("failed jwt realms 
reload")); + } + // Verify both called despite failure + verify(mockedClient).execute(eq(ActionTypes.RELOAD_REMOTE_CLUSTER_CREDENTIALS_ACTION), any(), any()); + verify(mockedRealms).stream(); + } + public void testLoadNoExtensions() throws Exception { Settings settings = Settings.builder() .put("xpack.security.enabled", true) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java index 8743453d33a35..e50d6cbac5338 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/saml/TransportSamlInvalidateSessionActionTests.java @@ -28,7 +28,6 @@ import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.TransportClearScrollAction; import org.elasticsearch.action.search.TransportSearchAction; @@ -201,15 +200,13 @@ protected void ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - new SearchResponseSections( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1 - ), + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + false, + false, + null, + 1, "_scrollId1", 1, 1, @@ -225,15 +222,13 @@ protected void ActionListener.respondAndRelease( listener, (Response) new SearchResponse( - new SearchResponseSections( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1 - ), + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + false, + false, + null, + 1, "_scrollId1", 1, 1, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java index 25194ca1e0234..cd297abe8f28d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ApiKeyServiceTests.java @@ -60,7 +60,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ClusterServiceUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.MockLogAppender; @@ -411,25 +410,31 @@ public void testInvalidateApiKeysWillSetInvalidatedFlagAndRecordTimestamp() { builder.map(buildApiKeySourceDoc("some_hash".toCharArray())); searchHit.sourceRef(BytesReference.bytes(builder)); } - final var internalSearchResponse = new InternalSearchResponse( - new SearchHits( - new SearchHit[] { searchHit }, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - randomFloat(), + ActionListener.respondAndRelease( + listener, + new SearchResponse( + new SearchHits( + new SearchHit[] { searchHit 
}, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + randomFloat(), + null, + null, + null + ), + null, + null, + false, + null, null, + 0, + randomAlphaOfLengthBetween(3, 8), + 1, + 1, + 0, + 10, null, null - ), - null, - null, - null, - false, - null, - 0 - ); - ActionListener.respondAndRelease( - listener, - new SearchResponse(internalSearchResponse, randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) + ) ); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); @@ -753,22 +758,20 @@ public void testCrossClusterApiKeyUsageStats() { ActionListener.respondAndRelease( listener, new SearchResponse( - new InternalSearchResponse( - new SearchHits( - searchHits.toArray(SearchHit[]::new), - new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), - randomFloat(), - null, - null, - null - ), - null, - null, + new SearchHits( + searchHits.toArray(SearchHit[]::new), + new TotalHits(searchHits.size(), TotalHits.Relation.EQUAL_TO), + randomFloat(), null, - false, null, - 0 + null ), + null, + null, + false, + null, + null, + 0, randomAlphaOfLengthBetween(3, 8), 1, 1, @@ -2243,13 +2246,11 @@ public void testMaybeBuildUpdatedDocument() throws IOException { assertEquals(realm.getType(), updatedApiKeyDoc.creator.get("realm_type")); if (realm.getDomain() != null) { @SuppressWarnings("unchecked") - final var actualRealmDomain = RealmDomain.fromXContent( - XContentHelper.mapToXContentParser( - XContentParserConfiguration.EMPTY, - (Map) updatedApiKeyDoc.creator.get("realm_domain") - ) - ); - assertEquals(realm.getDomain(), actualRealmDomain); + var m = (Map) updatedApiKeyDoc.creator.get("realm_domain"); + try (var p = XContentHelper.mapToXContentParser(XContentParserConfiguration.EMPTY, m)) { + final var actualRealmDomain = RealmDomain.fromXContent(p); + assertEquals(realm.getDomain(), actualRealmDomain); + } } else { assertFalse(updatedApiKeyDoc.creator.containsKey("realm_domain")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java index 57bda2ad9cc1d..09a7e33e51901 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java @@ -54,6 +54,10 @@ protected static RealmConfig config(RealmConfig.RealmIdentifier realmId, Setting @Before public void setUpLdapConnection() throws Exception { + doSetupLdapConnection(); + } + + protected void doSetupLdapConnection() throws Exception { Path trustPath = getDataPath(trustPath()); this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), trustPath); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java index 8d5d89b4c5054..3a9fee4288bf2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/service/IndexServiceAccountTokenStoreTests.java @@ -42,7 +42,6 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import 
org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.XContentTestUtils; import org.elasticsearch.threadpool.ThreadPool; @@ -270,19 +269,24 @@ public void testFindTokensFor() { ) ) .toArray(SearchHit[]::new); - final InternalSearchResponse internalSearchResponse; - internalSearchResponse = new InternalSearchResponse( - new SearchHits(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), - null, - null, - null, - false, - null, - 0 - ); ActionListener.respondAndRelease( l, - new SearchResponse(internalSearchResponse, randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) + new SearchResponse( + new SearchHits(hits, new TotalHits(nhits, TotalHits.Relation.EQUAL_TO), randomFloat(), null, null, null), + null, + null, + false, + null, + null, + 0, + randomAlphaOfLengthBetween(3, 8), + 1, + 1, + 0, + 10, + null, + null + ) ); } else if (r instanceof ClearScrollRequest) { l.onResponse(new ClearScrollResponse(true, 1)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java index 3b52f86c00ba8..169275ccc3ee3 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/support/mapper/NativeRoleMappingStoreTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.script.mustache.MustacheScriptEngine; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; @@ -355,25 +354,31 @@ private void doAnswerWithSearchResult(Client client, ExpressionRoleMapping mappi mapping.toXContent(builder, ToXContent.EMPTY_PARAMS); searchHit.sourceRef(BytesReference.bytes(builder)); } - final var internalSearchResponse = new InternalSearchResponse( - new SearchHits( - new SearchHit[] { searchHit }, - new TotalHits(1, TotalHits.Relation.EQUAL_TO), - randomFloat(), + ActionListener.respondAndRelease( + listener, + new SearchResponse( + new SearchHits( + new SearchHit[] { searchHit }, + new TotalHits(1, TotalHits.Relation.EQUAL_TO), + randomFloat(), + null, + null, + null + ), + null, + null, + false, + null, null, + 0, + randomAlphaOfLengthBetween(3, 8), + 1, + 1, + 0, + 10, null, null - ), - null, - null, - null, - false, - null, - 0 - ); - ActionListener.respondAndRelease( - listener, - new SearchResponse(internalSearchResponse, randomAlphaOfLengthBetween(3, 8), 1, 1, 0, 10, null, null) + ) ); return null; }).when(client).search(any(SearchRequest.class), anyActionListener()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java index 4cabe5a8ec3ba..ad05cb20ffbbf 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/AuthorizationServiceTests.java @@ -7,6 +7,7 @@ 
package org.elasticsearch.xpack.security.authz; import org.elasticsearch.ElasticsearchSecurityException; +import org.elasticsearch.TransportVersion; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.CompositeIndicesRequest; import org.elasticsearch.action.DocWriteRequest; @@ -61,6 +62,7 @@ import org.elasticsearch.action.search.MultiSearchRequest; import org.elasticsearch.action.search.OpenPointInTimeRequest; import org.elasticsearch.action.search.ParsedScrollId; +import org.elasticsearch.action.search.SearchContextId; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.SearchTransportService; @@ -101,6 +103,7 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; +import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.bulk.stats.BulkOperationListener; @@ -110,7 +113,12 @@ import org.elasticsearch.license.MockLicenseState; import org.elasticsearch.license.XPackLicenseState; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.search.SearchPhaseResult; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.search.builder.PointInTimeBuilder; +import org.elasticsearch.search.builder.SearchSourceBuilder; import org.elasticsearch.search.internal.AliasFilter; +import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.internal.ShardSearchRequest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -1229,6 +1237,72 @@ public void testSearchAgainstIndex() { verifyNoMoreInteractions(auditTrail); } + public void testSearchPITAgainstIndex() { + RoleDescriptor role = new RoleDescriptor( + "search_index", + null, + new IndicesPrivileges[] { IndicesPrivileges.builder().indices("index-*").privileges("read").build() }, + null + ); + roleMap.put(role.getName(), role); + final Authentication authentication = createAuthentication(new User("test search user", role.getName())); + + final String requestId = AuditUtil.getOrGenerateRequestId(threadContext); + final String indexName = "index-" + randomAlphaOfLengthBetween(1, 5); + + final ClusterState clusterState = mockMetadataWithIndex(indexName); + final IndexMetadata indexMetadata = clusterState.metadata().index(indexName); + + PointInTimeBuilder pit = new PointInTimeBuilder(createEncodedPIT(indexMetadata.getIndex())); + SearchRequest searchRequest = new SearchRequest().source(new SearchSourceBuilder().pointInTimeBuilder(pit)) + .allowPartialSearchResults(false); + final ShardSearchRequest shardRequest = new ShardSearchRequest( + new OriginalIndices(new String[] { indexName }, searchRequest.indicesOptions()), + searchRequest, + new ShardId(indexMetadata.getIndex(), 0), + 0, + 1, + AliasFilter.EMPTY, + 1.0f, + System.currentTimeMillis(), + null + ); + this.setFakeOriginatingAction = false; + authorize(authentication, TransportSearchAction.TYPE.name(), searchRequest, true, () -> { + verify(rolesStore).getRoles(Mockito.same(authentication), Mockito.any()); + IndicesAccessControl iac = threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY); + // Successful search action authorization should set a parent authorization header. 
+ assertThat(securityContext.getParentAuthorization().action(), equalTo(TransportSearchAction.TYPE.name())); + // Within the action handler, execute a child action (the query phase of search) + authorize(authentication, SearchTransportService.QUERY_ACTION_NAME, shardRequest, false, () -> { + // This child action triggers a second interaction with the role store (which is cached) + verify(rolesStore, times(2)).getRoles(Mockito.same(authentication), Mockito.any()); + // But it does not create a new IndicesAccessControl + assertThat(threadContext.getTransient(AuthorizationServiceField.INDICES_PERMISSIONS_KEY), sameInstance(iac)); + // The parent authorization header should only be present for direct child actions + // and not be carried over for a child of a child action. + // Meaning, only the query phase action should be pre-authorized in this case, and potential sub-actions should not be. + assertThat(securityContext.getParentAuthorization(), nullValue()); + }); + }); + assertThat(searchRequest.indices().length, equalTo(0)); + verify(auditTrail).accessGranted( + eq(requestId), + eq(authentication), + eq(TransportSearchAction.TYPE.name()), + eq(searchRequest), + authzInfoRoles(new String[] { role.getName() }) + ); + verify(auditTrail).accessGranted( + eq(requestId), + eq(authentication), + eq(SearchTransportService.QUERY_ACTION_NAME), + eq(shardRequest), + authzInfoRoles(new String[] { role.getName() }) + ); + verifyNoMoreInteractions(auditTrail); + } + public void testScrollRelatedRequestsAllowed() { RoleDescriptor role = new RoleDescriptor( "a_all", @@ -3545,6 +3619,26 @@ static AuthorizationInfo authzInfoRoles(String[] expectedRoles) { return ArgumentMatchers.argThat(new RBACAuthorizationInfoRoleMatcher(expectedRoles)); } + private static class TestSearchPhaseResult extends SearchPhaseResult { + final DiscoveryNode node; + + TestSearchPhaseResult(ShardSearchContextId contextId, DiscoveryNode node) { + this.contextId = contextId; + this.node = node; + } + } + + private static String createEncodedPIT(Index index) { + DiscoveryNode node1 = DiscoveryNodeUtils.create("node_1"); + TestSearchPhaseResult testSearchPhaseResult1 = new TestSearchPhaseResult(new ShardSearchContextId("a", 1), node1); + testSearchPhaseResult1.setSearchShardTarget( + new SearchShardTarget("node_1", new ShardId(index.getName(), index.getUUID(), 0), null) + ); + List<SearchPhaseResult> results = new ArrayList<>(); + results.add(testSearchPhaseResult1); + return SearchContextId.encode(results, Collections.emptyMap(), TransportVersion.current()); + } + private static class RBACAuthorizationInfoRoleMatcher implements ArgumentMatcher<AuthorizationInfo> { private final String[] wanted; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java index d229124419cb2..afe5f32f70d28 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/store/NativePrivilegeStoreTests.java @@ -22,7 +22,6 @@ import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.support.PlainActionFuture; import 
org.elasticsearch.action.support.WriteRequest; @@ -822,15 +821,13 @@ private SearchHit[] buildHits(List sourcePrivile private static SearchResponse buildSearchResponse(SearchHit[] hits) { return new SearchResponse( - new SearchResponseSections( - new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), - null, - null, - false, - false, - null, - 1 - ), + new SearchHits(hits, new TotalHits(hits.length, TotalHits.Relation.EQUAL_TO), 0f), + null, + null, + false, + false, + null, + 1, "_scrollId1", 1, 1, diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java index 69884cd1e6dbd..d3b46f5847636 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/rest/SecurityRestFilterTests.java @@ -230,13 +230,17 @@ public Set<String> getFilteredFields() { assertEquals(restRequest, handlerRequest.get()); assertEquals(restRequest.content(), handlerRequest.get().content()); - Map<String, Object> original = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - handlerRequest.get().content().streamInput() - ) - .map(); + Map<String, Object> original; + try ( + var parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + handlerRequest.get().content().streamInput() + ) + ) { + original = parser.map(); + } assertEquals(2, original.size()); assertEquals(SecuritySettingsSourceField.TEST_PASSWORD, original.get("password")); assertEquals("bar", original.get("foo")); @@ -244,13 +248,17 @@ public Set<String> getFilteredFields() { assertNotEquals(restRequest, auditTrailRequest.get()); assertNotEquals(restRequest.content(), auditTrailRequest.get().content()); - Map<String, Object> map = XContentType.JSON.xContent() - .createParser( - NamedXContentRegistry.EMPTY, - DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - auditTrailRequest.get().content().streamInput() - ) - .map(); + Map<String, Object> map; + try ( + var parser = XContentType.JSON.xContent() + .createParser( + NamedXContentRegistry.EMPTY, + DeprecationHandler.THROW_UNSUPPORTED_OPERATION, + auditTrailRequest.get().content().streamInput() + ) + ) { + map = parser.map(); + } assertEquals(1, map.size()); assertEquals("bar", map.get("foo")); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolverTests.java deleted file mode 100644 index debb50384e217..0000000000000 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/RemoteClusterCredentialsResolverTests.java +++ /dev/null @@ -1,38 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License - * 2.0; you may not use this file except in compliance with the Elastic License - * 2.0. 
- */ -package org.elasticsearch.xpack.security.transport; - -import org.elasticsearch.common.settings.MockSecureSettings; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.security.authc.ApiKeyService; - -import java.util.Optional; - -import static org.elasticsearch.xpack.security.transport.RemoteClusterCredentialsResolver.RemoteClusterCredentials; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -public class RemoteClusterCredentialsResolverTests extends ESTestCase { - - public void testResolveRemoteClusterCredentials() { - final String clusterNameA = "clusterA"; - final String clusterDoesNotExist = randomAlphaOfLength(10); - final Settings.Builder builder = Settings.builder(); - - final String secret = randomAlphaOfLength(20); - final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("cluster.remote." + clusterNameA + ".credentials", secret); - final Settings settings = builder.setSecureSettings(secureSettings).build(); - RemoteClusterCredentialsResolver remoteClusterAuthorizationResolver = new RemoteClusterCredentialsResolver(settings); - final Optional remoteClusterCredentials = remoteClusterAuthorizationResolver.resolve(clusterNameA); - assertThat(remoteClusterCredentials.isPresent(), is(true)); - assertThat(remoteClusterCredentials.get().clusterAlias(), equalTo(clusterNameA)); - assertThat(remoteClusterCredentials.get().credentials(), equalTo(ApiKeyService.withApiKeyPrefix(secret))); - assertThat(remoteClusterAuthorizationResolver.resolve(clusterDoesNotExist), is(Optional.empty())); - } -} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java index 57e48581d159c..46b0fac78ad8e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityServerTransportInterceptorTests.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.ssl.SslClientAuthenticationMode; import org.elasticsearch.common.ssl.SslConfiguration; @@ -33,6 +34,7 @@ import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.RemoteClusterPortSettings; +import org.elasticsearch.transport.RemoteConnectionManager.RemoteClusterAliasWithCredentials; import org.elasticsearch.transport.SendRequestTransportException; import org.elasticsearch.transport.Transport; import org.elasticsearch.transport.Transport.Connection; @@ -77,6 +79,7 @@ import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicReference; import java.util.function.Consumer; +import java.util.function.Function; import static org.elasticsearch.cluster.metadata.DataStreamLifecycle.DATA_STREAM_LIFECYCLE_ORIGIN; import static org.elasticsearch.test.ActionListenerUtils.anyActionListener; @@ -87,7 +90,6 @@ import static 
org.elasticsearch.xpack.core.security.authc.CrossClusterAccessSubjectInfo.CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY; import static org.elasticsearch.xpack.core.security.authz.RoleDescriptorTests.randomUniquelyNamedRoleDescriptors; import static org.elasticsearch.xpack.security.authc.CrossClusterAccessHeaders.CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY; -import static org.elasticsearch.xpack.security.transport.RemoteClusterCredentialsResolver.RemoteClusterCredentials; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.equalTo; @@ -153,7 +155,6 @@ public void testSendAsync() throws Exception { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener @@ -205,7 +206,6 @@ public void testSendAsyncSwitchToSystem() throws Exception { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener @@ -250,7 +250,6 @@ public void testSendWithoutUser() throws Exception { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ) { @Override @@ -313,7 +312,6 @@ public void testSendToNewerVersionSetsCorrectVersion() throws Exception { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener @@ -382,7 +380,6 @@ public void testSendToOlderVersionSetsCorrectVersion() throws Exception { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); ClusterServiceUtils.setState(clusterService, clusterService.state()); // force state update to trigger listener @@ -449,7 +446,6 @@ public void testSetUserBasedOnActionOrigin() { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); @@ -604,7 +600,6 @@ public void testSendWithCrossClusterAccessHeadersWithUnsupportedLicense() throws AuthenticationTestHelper.builder().build().writeToContext(threadContext); final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mockRemoteClusterCredentialsResolver(remoteClusterAlias); final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor( settings, @@ -618,9 +613,8 @@ public void testSendWithCrossClusterAccessHeadersWithUnsupportedLicense() throws new 
ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - remoteClusterCredentialsResolver, unsupportedLicenseState, - ignored -> Optional.of(remoteClusterAlias) + mockRemoteClusterCredentialsResolver(remoteClusterAlias) ); final AsyncSender sender = interceptor.interceptSender(mock(AsyncSender.class, ignored -> { @@ -661,18 +655,16 @@ public TransportResponse read(StreamInput in) { actualException.get().getCause().getMessage(), equalTo("current license is non-compliant for [" + Security.ADVANCED_REMOTE_CLUSTER_SECURITY_FEATURE.getName() + "]") ); - verify(remoteClusterCredentialsResolver, times(1)).resolve(eq(remoteClusterAlias)); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue()); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue()); } - private RemoteClusterCredentialsResolver mockRemoteClusterCredentialsResolver(String remoteClusterAlias) { - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mock(RemoteClusterCredentialsResolver.class); - final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(randomAlphaOfLengthBetween(10, 42)); - when(remoteClusterCredentialsResolver.resolve(any())).thenReturn( - Optional.of(new RemoteClusterCredentials(remoteClusterAlias, remoteClusterCredential)) + private Function<Transport.Connection, Optional<RemoteClusterAliasWithCredentials>> mockRemoteClusterCredentialsResolver( + String remoteClusterAlias + ) { + return connection -> Optional.of( + new RemoteClusterAliasWithCredentials(remoteClusterAlias, new SecureString(randomAlphaOfLengthBetween(10, 42).toCharArray())) ); - return remoteClusterCredentialsResolver; } public void testSendWithCrossClusterAccessHeadersForSystemUserRegularAction() throws Exception { @@ -736,12 +728,9 @@ private void doTestSendWithCrossClusterAccessHeaders( ) throws IOException { authentication.writeToContext(threadContext); final String expectedRequestId = AuditUtil.getOrGenerateRequestId(threadContext); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mock(RemoteClusterCredentialsResolver.class); final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10); - final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(randomAlphaOfLengthBetween(10, 42)); - when(remoteClusterCredentialsResolver.resolve(any())).thenReturn( - Optional.of(new RemoteClusterCredentials(remoteClusterAlias, remoteClusterCredential)) - ); + final String encodedApiKey = randomAlphaOfLengthBetween(10, 42); + final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(encodedApiKey); final AuthorizationService authzService = mock(AuthorizationService.class); // We capture the listener so that we can complete the full flow, by calling onResponse further down @SuppressWarnings("unchecked") @@ -760,9 +749,8 @@ private void doTestSendWithCrossClusterAccessHeaders( new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - remoteClusterCredentialsResolver, mockLicenseState, - ignored -> Optional.of(remoteClusterAlias) + ignored -> Optional.of(new RemoteClusterAliasWithCredentials(remoteClusterAlias, new SecureString(encodedApiKey.toCharArray()))) ); final AtomicBoolean calledWrappedSender = new AtomicBoolean(false); @@ -861,7 +849,6 @@ public TransportResponse read(StreamInput in) { } 
assertThat(sentCredential.get(), equalTo(remoteClusterCredential)); verify(securityContext, never()).executeAsInternalUser(any(), any(), anyConsumer()); - verify(remoteClusterCredentialsResolver, times(1)).resolve(eq(remoteClusterAlias)); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue()); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue()); assertThat(AuditUtil.extractRequestId(securityContext.getThreadContext()), equalTo(expectedRequestId)); @@ -874,15 +861,9 @@ public void testSendWithUserIfCrossClusterAccessHeadersConditionNotMet() throws if (false == (notRemoteConnection || noCredential)) { noCredential = true; } + final boolean finalNoCredential = noCredential; final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mock(RemoteClusterCredentialsResolver.class); - when(remoteClusterCredentialsResolver.resolve(any())).thenReturn( - noCredential - ? Optional.empty() - : Optional.of( - new RemoteClusterCredentials(remoteClusterAlias, ApiKeyService.withApiKeyPrefix(randomAlphaOfLengthBetween(10, 42))) - ) - ); + final String encodedApiKey = randomAlphaOfLengthBetween(10, 42); final AuthenticationTestHelper.AuthenticationTestBuilder builder = AuthenticationTestHelper.builder(); final Authentication authentication = randomFrom( builder.apiKey().build(), @@ -904,9 +885,12 @@ public void testSendWithUserIfCrossClusterAccessHeadersConditionNotMet() throws new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - remoteClusterCredentialsResolver, mockLicenseState, - ignored -> notRemoteConnection ? Optional.empty() : Optional.of(remoteClusterAlias) + ignored -> notRemoteConnection + ? Optional.empty() + : (finalNoCredential + ? 
Optional.of(new RemoteClusterAliasWithCredentials(remoteClusterAlias, null)) + : Optional.of(new RemoteClusterAliasWithCredentials(remoteClusterAlias, new SecureString(encodedApiKey.toCharArray())))) ); final AtomicBoolean calledWrappedSender = new AtomicBoolean(false); @@ -944,12 +928,9 @@ public void testSendWithCrossClusterAccessHeadersThrowsOnOldConnection() throws .realm() .build(); authentication.writeToContext(threadContext); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mock(RemoteClusterCredentialsResolver.class); final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10); - final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(randomAlphaOfLengthBetween(10, 42)); - when(remoteClusterCredentialsResolver.resolve(any())).thenReturn( - Optional.of(new RemoteClusterCredentials(remoteClusterAlias, remoteClusterCredential)) - ); + final String encodedApiKey = randomAlphaOfLengthBetween(10, 42); + final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(encodedApiKey); final SecurityServerTransportInterceptor interceptor = new SecurityServerTransportInterceptor( settings, @@ -963,9 +944,8 @@ public void testSendWithCrossClusterAccessHeadersThrowsOnOldConnection() throws new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - remoteClusterCredentialsResolver, mockLicenseState, - ignored -> Optional.of(remoteClusterAlias) + ignored -> Optional.of(new RemoteClusterAliasWithCredentials(remoteClusterAlias, new SecureString(encodedApiKey.toCharArray()))) ); final AsyncSender sender = interceptor.interceptSender(new AsyncSender() { @@ -1029,7 +1009,6 @@ public TransportResponse read(StreamInput in) { + "] does not support receiving them" ) ); - verify(remoteClusterCredentialsResolver, times(1)).resolve(eq(remoteClusterAlias)); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_SUBJECT_INFO_HEADER_KEY), nullValue()); assertThat(securityContext.getThreadContext().getHeader(CROSS_CLUSTER_ACCESS_CREDENTIALS_HEADER_KEY), nullValue()); } @@ -1040,12 +1019,9 @@ public void testSendRemoteRequestFailsIfUserHasNoRemoteIndicesPrivileges() throw .realm() .build(); authentication.writeToContext(threadContext); - final RemoteClusterCredentialsResolver remoteClusterCredentialsResolver = mock(RemoteClusterCredentialsResolver.class); final String remoteClusterAlias = randomAlphaOfLengthBetween(5, 10); - final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(randomAlphaOfLengthBetween(10, 42)); - when(remoteClusterCredentialsResolver.resolve(any())).thenReturn( - Optional.of(new RemoteClusterCredentials(remoteClusterAlias, remoteClusterCredential)) - ); + final String encodedApiKey = randomAlphaOfLengthBetween(10, 42); + final String remoteClusterCredential = ApiKeyService.withApiKeyPrefix(encodedApiKey); final AuthorizationService authzService = mock(AuthorizationService.class); doAnswer(invocation -> { @@ -1067,9 +1043,8 @@ public void testSendRemoteRequestFailsIfUserHasNoRemoteIndicesPrivileges() throw new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - remoteClusterCredentialsResolver, mockLicenseState, - ignored -> Optional.of(remoteClusterAlias) + ignored -> Optional.of(new RemoteClusterAliasWithCredentials(remoteClusterAlias, new SecureString(encodedApiKey.toCharArray()))) ); final 
AsyncSender sender = interceptor.interceptSender(new AsyncSender() { @@ -1171,7 +1146,6 @@ public void testProfileFiltersCreatedDifferentlyForDifferentTransportAndRemoteCl new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); @@ -1225,7 +1199,6 @@ public void testNoProfileFilterForRemoteClusterWhenTheFeatureIsDisabled() { new ClusterSettings(Settings.EMPTY, Collections.singleton(DestructiveOperations.REQUIRES_NAME_SETTING)) ), mock(CrossClusterAccessAuthenticationService.class), - new RemoteClusterCredentialsResolver(settings), mockLicenseState ); diff --git a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java index a578c4b7d21fd..31c624df67813 100644 --- a/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java +++ b/x-pack/plugin/slm/src/main/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistry.java @@ -41,10 +41,12 @@ public class SnapshotLifecycleTemplateRegistry extends IndexTemplateRegistry { // version 3: templates moved to composable templates // version 4: converted to data stream // version 5: add `allow_auto_create` setting - public static final int INDEX_TEMPLATE_VERSION = 5; + // version 6: managed by data stream lifecycle + // version 7: version the index template name so we can upgrade existing deployments + public static final int INDEX_TEMPLATE_VERSION = 7; public static final String SLM_TEMPLATE_VERSION_VARIABLE = "xpack.slm.template.version"; - public static final String SLM_TEMPLATE_NAME = ".slm-history"; + public static final String SLM_TEMPLATE_NAME = ".slm-history-" + INDEX_TEMPLATE_VERSION; public static final String SLM_POLICY_NAME = "slm-history-ilm-policy"; diff --git a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java index af3676eb24e0f..3601b3c010739 100644 --- a/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java +++ b/x-pack/plugin/slm/src/test/java/org/elasticsearch/xpack/slm/history/SnapshotLifecycleTemplateRegistryTests.java @@ -64,6 +64,7 @@ import static org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry.SLM_POLICY_NAME; import static org.elasticsearch.xpack.slm.history.SnapshotLifecycleTemplateRegistry.SLM_TEMPLATE_NAME; import static org.hamcrest.Matchers.anEmptyMap; +import static org.hamcrest.Matchers.endsWith; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.hasSize; @@ -317,6 +318,10 @@ public void testValidate() { ); } + public void testTemplateNameIsVersioned() { + assertThat(SLM_TEMPLATE_NAME, endsWith("-" + INDEX_TEMPLATE_VERSION)); + } + // ------------- /** @@ -365,7 +370,6 @@ private ActionResponse verifyTemplateInstalled( assertThat(request, instanceOf(PutComposableIndexTemplateAction.Request.class)); final PutComposableIndexTemplateAction.Request putRequest = (PutComposableIndexTemplateAction.Request) request; assertThat(putRequest.name(), equalTo(SLM_TEMPLATE_NAME)); - 
assertThat(putRequest.indexTemplate().template().settings().get("index.lifecycle.name"), equalTo(SLM_POLICY_NAME)); assertThat(putRequest.indexTemplate().version(), equalTo((long) INDEX_TEMPLATE_VERSION)); assertNotNull(listener); return new TestPutIndexTemplateResponse(true); diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java index 8364ad3d4c027..7c5fa5053222b 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/GeoShapeWithDocValuesFieldTypeTests.java @@ -25,16 +25,18 @@ import org.elasticsearch.index.mapper.MapperBuilderContext; import org.elasticsearch.xpack.vectortile.SpatialGeometryFormatterExtension; import org.elasticsearch.xpack.vectortile.feature.FeatureFactory; -import org.hamcrest.Matchers; import java.io.IOException; import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.instanceOf; + public class GeoShapeWithDocValuesFieldTypeTests extends FieldTypeTestCase { - public void testFetchSourceValue() throws IOException { + public void testFetchSourceValue() throws Exception { final GeoFormatterFactory geoFormatterFactory = new GeoFormatterFactory<>( new SpatialGeometryFormatterExtension().getGeometryFormatterFactories() ); @@ -53,26 +55,43 @@ public void testFetchSourceValue() throws IOException { String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.0 15.0)"; String wktMalformed = "POINT foo"; + byte[] wkbLine = WellKnownBinary.toWKB( + WellKnownText.fromWKT(StandardValidator.NOOP, false, wktLineString), + ByteOrder.LITTLE_ENDIAN + ); + byte[] wkbPoint = WellKnownBinary.toWKB(WellKnownText.fromWKT(StandardValidator.NOOP, false, wktPoint), ByteOrder.LITTLE_ENDIAN); // Test a single shape in geojson format. Object sourceValue = jsonLineString; assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLine)); // Test a malformed single shape in geojson format sourceValue = jsonMalformed; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of shapes in geojson format. 
sourceValue = List.of(jsonLineString, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a list of shapes including one malformed in geojson format sourceValue = List.of(jsonLineString, jsonMalformed, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a single shape in wkt format. sourceValue = wktLineString; @@ -109,26 +128,31 @@ public void testFetchStoredValue() throws IOException { geoFormatterFactory ).setStored(true).build(MapperBuilderContext.root(randomBoolean(), false)).fieldType(); - ByteOrder byteOrder = randomBoolean() ? ByteOrder.BIG_ENDIAN : ByteOrder.LITTLE_ENDIAN; - Map jsonLineString = Map.of("type", "LineString", "coordinates", List.of(List.of(42.0, 27.1), List.of(30.0, 50.0))); Map jsonPoint = Map.of("type", "Point", "coordinates", List.of(14.0, 15.0)); String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.0 15.0)"; BytesRef wkbLineString = new BytesRef( - WellKnownBinary.toWKB(new Line(new double[] { 42.0, 30.0 }, new double[] { 27.1, 50.0 }), byteOrder) + WellKnownBinary.toWKB(new Line(new double[] { 42.0, 30.0 }, new double[] { 27.1, 50.0 }), ByteOrder.LITTLE_ENDIAN) ); - BytesRef wkbPoint = new BytesRef(WellKnownBinary.toWKB(new Point(14.0, 15.0), byteOrder)); + BytesRef wkbPoint = new BytesRef(WellKnownBinary.toWKB(new Point(14.0, 15.0), ByteOrder.LITTLE_ENDIAN)); // Test a single shape in wkb format. List storedValues = List.of(wkbLineString); assertEquals(List.of(jsonLineString), fetchStoredValue(mapper, storedValues, null)); assertEquals(List.of(wktLineString), fetchStoredValue(mapper, storedValues, "wkt")); + List wkb = fetchStoredValue(mapper, storedValues, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLineString.bytes)); // Test a list of shapes in wkb format. storedValues = List.of(wkbLineString, wkbPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchStoredValue(mapper, storedValues, null)); assertEquals(List.of(wktLineString, wktPoint), fetchStoredValue(mapper, storedValues, "wkt")); + wkb = fetchStoredValue(mapper, storedValues, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLineString.bytes)); + assertThat(wkb.get(1), equalTo(wkbPoint.bytes)); } public void testFetchVectorTile() throws IOException { @@ -180,9 +204,9 @@ private void fetchVectorTile(Geometry geometry) throws IOException { // happen that the geometry is out of range (close to the poles). 
features = List.of(); } - assertThat(features.size(), Matchers.equalTo(sourceValue.size())); + assertThat(features.size(), equalTo(sourceValue.size())); for (int i = 0; i < features.size(); i++) { - assertThat(sourceValue.get(i), Matchers.equalTo(features.get(i))); + assertThat(sourceValue.get(i), equalTo(features.get(i))); } } @@ -308,10 +332,10 @@ private void assertFetchSourceMVT(Object sourceValue, String mvtEquivalentAsWKT) final int extent = randomIntBetween(256, 4096); List mvtExpected = fetchSourceValue(mapper, mvtEquivalentAsWKT, "mvt(0/0/0@" + extent + ")"); List mvt = fetchSourceValue(mapper, sourceValue, "mvt(0/0/0@" + extent + ")"); - assertThat(mvt.size(), Matchers.equalTo(1)); - assertThat(mvt.size(), Matchers.equalTo(mvtExpected.size())); - assertThat(mvtExpected.get(0), Matchers.instanceOf(byte[].class)); - assertThat(mvt.get(0), Matchers.instanceOf(byte[].class)); - assertThat((byte[]) mvt.get(0), Matchers.equalTo((byte[]) mvtExpected.get(0))); + assertThat(mvt.size(), equalTo(1)); + assertThat(mvt.size(), equalTo(mvtExpected.size())); + assertThat(mvtExpected.get(0), instanceOf(byte[].class)); + assertThat(mvt.get(0), instanceOf(byte[].class)); + assertThat((byte[]) mvt.get(0), equalTo((byte[]) mvtExpected.get(0))); } } diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java index ed902b0f8cfe1..6524860e9438c 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/PointFieldTypeTests.java @@ -7,14 +7,19 @@ package org.elasticsearch.xpack.spatial.index.mapper; +import org.elasticsearch.geometry.Point; +import org.elasticsearch.geometry.utils.WellKnownBinary; import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; import java.io.IOException; +import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class PointFieldTypeTests extends FieldTypeTestCase { public void testFetchSourceValue() throws IOException { @@ -24,26 +29,39 @@ public void testFetchSourceValue() throws IOException { String wktPoint = "POINT (42.0 27.1)"; Map otherJsonPoint = Map.of("type", "Point", "coordinates", List.of(30.0, 50.0)); String otherWktPoint = "POINT (30.0 50.0)"; + byte[] wkbPoint = WellKnownBinary.toWKB(new Point(42.0, 27.1), ByteOrder.LITTLE_ENDIAN); + byte[] otherWkbPoint = WellKnownBinary.toWKB(new Point(30.0, 50.0), ByteOrder.LITTLE_ENDIAN); // Test a single point in [x, y] array format. Object sourceValue = List.of(42.0, 27.1); assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a single point in "x, y" string format. 
sourceValue = "42.0,27.1"; assertEquals(List.of(jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbPoint)); // Test a malformed single point sourceValue = "foo"; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of points in [x, y] array format. sourceValue = List.of(List.of(42.0, 27.1), List.of(30.0, 50.0)); assertEquals(List.of(jsonPoint, otherJsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktPoint, otherWktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbPoint)); + assertThat(wkb.get(1), equalTo(otherWkbPoint)); // Test a single point in well-known text format. sourceValue = "POINT (42.0 27.1)"; diff --git a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java index 1050c9acef11a..c7d87a6c6e8f5 100644 --- a/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java +++ b/x-pack/plugin/spatial/src/test/java/org/elasticsearch/xpack/spatial/index/mapper/ShapeFieldTypeTests.java @@ -7,18 +7,23 @@ package org.elasticsearch.xpack.spatial.index.mapper; +import org.elasticsearch.geometry.utils.StandardValidator; +import org.elasticsearch.geometry.utils.WellKnownBinary; +import org.elasticsearch.geometry.utils.WellKnownText; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.mapper.FieldTypeTestCase; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.MapperBuilderContext; -import java.io.IOException; +import java.nio.ByteOrder; import java.util.List; import java.util.Map; +import static org.hamcrest.Matchers.equalTo; + public class ShapeFieldTypeTests extends FieldTypeTestCase { - public void testFetchSourceValue() throws IOException { + public void testFetchSourceValue() throws Exception { MappedFieldType mapper = new ShapeFieldMapper.Builder("field", IndexVersion.current(), false, true).build( MapperBuilderContext.root(false, false) ).fieldType(); @@ -29,26 +34,43 @@ public void testFetchSourceValue() throws IOException { String wktLineString = "LINESTRING (42.0 27.1, 30.0 50.0)"; String wktPoint = "POINT (14.3 15.0)"; String wktMalformed = "POINT foo"; + byte[] wkbLine = WellKnownBinary.toWKB( + WellKnownText.fromWKT(StandardValidator.NOOP, false, wktLineString), + ByteOrder.LITTLE_ENDIAN + ); + byte[] wkbPoint = WellKnownBinary.toWKB(WellKnownText.fromWKT(StandardValidator.NOOP, false, wktPoint), ByteOrder.LITTLE_ENDIAN); // Test a single shape in geojson format. 
Object sourceValue = jsonLineString; assertEquals(List.of(jsonLineString), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString), fetchSourceValue(mapper, sourceValue, "wkt")); + List wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(1)); + assertThat(wkb.get(0), equalTo(wkbLine)); // Test a malformed single shape in geojson format sourceValue = jsonMalformed; assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkt")); + assertEquals(List.of(), fetchSourceValue(mapper, sourceValue, "wkb")); // Test a list of shapes in geojson format. sourceValue = List.of(jsonLineString, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a list of shapes including one malformed in geojson format sourceValue = List.of(jsonLineString, jsonMalformed, jsonPoint); assertEquals(List.of(jsonLineString, jsonPoint), fetchSourceValue(mapper, sourceValue, null)); assertEquals(List.of(wktLineString, wktPoint), fetchSourceValue(mapper, sourceValue, "wkt")); + wkb = fetchSourceValue(mapper, sourceValue, "wkb"); + assertThat(wkb.size(), equalTo(2)); + assertThat(wkb.get(0), equalTo(wkbLine)); + assertThat(wkb.get(1), equalTo(wkbPoint)); // Test a single shape in wkt format. sourceValue = wktLineString; diff --git a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java index bafdbeed8f1a4..42f42e2a26c03 100644 --- a/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java +++ b/x-pack/plugin/sql/sql-action/src/test/java/org/elasticsearch/xpack/sql/action/SqlTestUtils.java @@ -103,10 +103,13 @@ static XContentBuilder toXContentBuilder(XContentBuilder builder, CheckedConsume objectGenerator.accept(generator); generator.close(); // System.out.println(out.toString(StandardCharsets.UTF_8)); - XContentParser parser = builder.contentType() - .xContent() - .createParser(XContentParserConfiguration.EMPTY, new ByteArrayInputStream(out.toByteArray())); - builder.copyCurrentStructure(parser); + try ( + XContentParser parser = builder.contentType() + .xContent() + .createParser(XContentParserConfiguration.EMPTY, new ByteArrayInputStream(out.toByteArray())) + ) { + builder.copyCurrentStructure(parser); + } builder.flush(); ByteArrayOutputStream stream = (ByteArrayOutputStream) builder.getOutputStream(); assertEquals("serialized objects differ", out.toString(StandardCharsets.UTF_8), stream.toString(StandardCharsets.UTF_8)); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java index a0da67f3006a3..936f4aa23cd57 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/execution/search/Querier.java @@ -161,6 +161,7 @@ private void searchWithPointInTime(SearchRequest search, ActionListener { String pitId = 
openPointInTimeResponse.getPointInTimeId(); + search.indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS); search.indices(Strings.EMPTY_ARRAY); search.source().pointInTimeBuilder(new PointInTimeBuilder(pitId)); ActionListener closePitOnErrorListener = wrap(searchResponse -> { @@ -201,13 +202,14 @@ public static SearchRequest prepareRequest(SearchSourceBuilder source, SqlConfig source.timeout(cfg.requestTimeout()); SearchRequest searchRequest = new SearchRequest(INTRODUCING_UNSIGNED_LONG); - searchRequest.indices(indices); + if (source.pointInTimeBuilder() == null) { + searchRequest.indices(indices); + searchRequest.indicesOptions( + includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS + ); + } searchRequest.source(source); searchRequest.allowPartialSearchResults(cfg.allowPartialSearchResults()); - searchRequest.indicesOptions( - includeFrozen ? IndexResolver.FIELD_CAPS_FROZEN_INDICES_OPTIONS : IndexResolver.FIELD_CAPS_INDICES_OPTIONS - ); - return searchRequest; } diff --git a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 0efe2797c7f76..556a417fb5e79 100644 --- a/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/yamlRestTest/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -43,7 +43,6 @@ public class XPackRestIT extends AbstractXPackRestTest { .setting("xpack.searchable.snapshot.shared_cache.region_size", "256KB") .user("x_pack_rest_user", "x-pack-test-password") .feature(FeatureFlag.TIME_SERIES_MODE) - .feature(FeatureFlag.LEARNING_TO_RANK) .configFile("testnode.pem", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) .configFile("testnode.crt", Resource.fromClasspath("org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) .configFile("service_tokens", Resource.fromClasspath("service_tokens")) diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml index b575ddccb449a..09f08d59049ec 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/api_key/10_basic.yml @@ -191,6 +191,7 @@ teardown: Authorization: "Basic YXBpX2tleV91c2VyOngtcGFjay10ZXN0LXBhc3N3b3Jk" # api_key_user security.get_api_key: owner: true + active_only: true - length: { "api_keys" : 1 } - match: { "api_keys.0.username": "api_key_user" } - match: { "api_keys.0.invalidated": false } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml index d0f7c7636582f..4a0d6387683ac 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/10_basic.yml @@ -29,6 +29,16 @@ setup: - do: indices.refresh: { } +--- +"Counted keyword is searchable by default": + - do: + field_caps: + index: test-events + fields: [ events ] + + - match: { fields.events.counted_keyword.searchable: true } + - match: { fields.events.counted_keyword.aggregatable: true } + --- "Counted Terms agg": diff --git 
a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml new file mode 100644 index 0000000000000..1fe48207b5586 --- /dev/null +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/counted_keyword/20_no_index.yml @@ -0,0 +1,54 @@ +setup: + + - skip: + version: " - 8.12.99" + reason: "index option on counted_keyword was added in 8.13" + + - do: + indices.create: + index: test-events-no-index + body: + mappings: + properties: + events: + type: counted_keyword + index: false + + - do: + index: + index: test-events-no-index + id: "1" + body: { "events": [ "a", "a", "b" ] } + + + - do: + indices.refresh: { } + +--- +"Counted keyword with index false is not searchable": + - do: + field_caps: + index: test-events-no-index + fields: [ events ] + + - match: { fields.events.counted_keyword.searchable: false } + - match: { fields.events.counted_keyword.aggregatable: true } + +--- +"Counted Terms agg only relies on doc values": +# although the field is not indexed, the counted_terms agg should still work + - do: + search: + index: test-events-no-index + body: + size: 0 + aggs: + event_terms: + counted_terms: + field: events + + - match: { aggregations.event_terms.buckets.0.key: "a" } + - match: { aggregations.event_terms.buckets.0.doc_count: 2 } + - match: { aggregations.event_terms.buckets.1.key: "b" } + - match: { aggregations.event_terms.buckets.1.doc_count: 1 } + - length: { aggregations.event_terms.buckets: 2 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml index e768a6b348959..102885af53ad7 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/100_bug_fix.yml @@ -126,3 +126,71 @@ - match: { values.2.0: null } - match: { values.2.1: null } - match: { values.2.2: index1 } + + +--- +"null MappedFieldType on single value detection (https://github.com/elastic/elasticsearch/issues/103141)": + - skip: + version: all + reason: "AwaitsFix fix https://github.com/elastic/elasticsearch/issues/103561" + - do: + indices.create: + index: npe_single_value_1 + body: + mappings: + properties: + field1: + type: long + - do: + indices.create: + index: npe_single_value_2 + body: + mappings: + properties: + field2: + type: long + - do: + indices.create: + index: npe_single_value_3 + body: + mappings: + properties: + field3: + type: long + + - do: + bulk: + refresh: true + body: + - { "index": { "_index": "npe_single_value_1" } } + - { "field1": 10 } + - { "index": { "_index": "npe_single_value_2" } } + - { "field2": 20 } + - { "index": { "_index": "npe_single_value_3" } } + - { "field3": 30 } + - do: + esql.query: + body: + query: 'from npe_single_value* | stats x = avg(field1) | limit 10' + - match: { columns.0.name: x } + - match: { columns.0.type: double } + - length: { values: 1 } + - match: { values.0.0: 10.0 } + + - do: + esql.query: + body: + query: 'from npe_single_value* | stats x = avg(field2) | limit 10' + - match: { columns.0.name: x } + - match: { columns.0.type: double } + - length: { values: 1 } + - match: { values.0.0: 20.0 } + + - do: + esql.query: + body: + query: 'from npe_single_value* | stats x = avg(field3) | limit 10' + - match: { columns.0.name: x } + - match: { columns.0.type: double } + - length: { values: 1 } + - 
match: { values.0.0: 30.0 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml index a099e327c32f0..5e29d3cdf2ae6 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/text_expansion_search.yml @@ -107,3 +107,183 @@ setup: model_text: "octopus comforter smells" - match: { hits.total.value: 4 } - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + +--- +"Test text expansion search with pruning config": + - skip: + version: " - 8.12.99" + reason: "pruning introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + pruning_config: + tokens_freq_ratio_threshold: 4 + tokens_weight_threshold: 0.4 + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + +--- +"Test named, boosted text expansion search with pruning config": + - skip: + version: " - 8.12.99" + reason: "pruning introduced in 8.13.0" + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + pruning_config: + tokens_freq_ratio_threshold: 4 + tokens_weight_threshold: 0.4 + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + - match: { hits.hits.0._score: 3.0 } + + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + pruning_config: + tokens_freq_ratio_threshold: 4 + tokens_weight_threshold: 0.4 + _name: i-like-naming-my-queries + boost: 100.0 + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + - match: { hits.hits.0.matched_queries: ["i-like-naming-my-queries"] } + - match: { hits.hits.0._score: 300.0 } + +--- +"Test text expansion search with default pruning config": + - skip: + version: " - 8.12.99" + reason: "pruning introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + pruning_config: {} + - match: { hits.total.value: 4 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + +--- +"Test text expansion search with weighted tokens rescoring only pruned tokens": + - skip: + version: " - 8.12.99" + reason: "pruning introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + text_expansion: + ml.tokens: + model_id: text_expansion_model + model_text: "octopus comforter smells" + pruning_config: + tokens_freq_ratio_threshold: 4 + tokens_weight_threshold: 0.4 + only_score_pruned_tokens: true + - match: { hits.total.value: 0 } + +--- +"Test weighted tokens search": + - skip: + version: " - 8.12.99" + reason: "weighted token search introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + weighted_tokens: + ml.tokens: + tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + pruning_config: + tokens_freq_ratio_threshold: 1 + tokens_weight_threshold: 
0.4 + only_score_pruned_tokens: false + - match: { hits.total.value: 5 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + +--- +"Test weighted tokens search with default pruning config": + - skip: + version: " - 8.12.99" + reason: "weighted token search introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + weighted_tokens: + ml.tokens: + tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + pruning_config: {} + - match: { hits.total.value: 5 } + - match: { hits.hits.0._source.source_text: "the octopus comforter smells" } + +--- +"Test weighted tokens search only scoring pruned tokens": + - skip: + version: " - 8.12.99" + reason: "weighted token search introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + weighted_tokens: + ml.tokens: + tokens: [{"the": 1.0}, {"comforter":1.0}, {"smells":1.0}, {"bad": 1.0}] + pruning_config: + tokens_freq_ratio_threshold: 4 + tokens_weight_threshold: 0.4 + only_score_pruned_tokens: true + - match: { hits.total.value: 0 } + +--- +"Test weighted tokens search that prunes tokens based on frequency": + - skip: + version: " - 8.12.99" + reason: "weighted token search introduced in 8.13.0" + + - do: + search: + index: index-with-rank-features + body: + query: + weighted_tokens: + ml.tokens: + tokens: [{"the": 1.0}, {"octopus":1.0}, {"comforter":1.0}, {"is": 1.0}, {"the": 1.0}, {"best": 1.0}, {"of": 1.0}, {"the": 1.0}, {"bunch": 1.0}] + pruning_config: + tokens_freq_ratio_threshold: 3 + tokens_weight_threshold: 0.4 + only_score_pruned_tokens: true + - match: { hits.total.value: 0 } diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml index 10de6e2c22d9e..1df34a64f860a 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/ml/validate.yml @@ -2,7 +2,7 @@ "Test valid job config": - do: ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -21,7 +21,7 @@ - do: catch: /.data_description. 
failed to parse field .format./ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -38,7 +38,7 @@ "Test valid job config with job ID": - do: ml.validate: - body: > + body: > { "job_id": "validate-job-config-with-job-id", "analysis_config": { @@ -58,7 +58,7 @@ - do: catch: /Invalid job_id; '_' can contain lowercase alphanumeric \(a-z and 0-9\), hyphens or underscores; must start and end with alphanumeric/ ml.validate: - body: > + body: > { "job_id": "_", "analysis_config": { @@ -78,7 +78,7 @@ - do: catch: /illegal_argument_exception/ ml.validate: - body: > + body: > { "model_snapshot_id": "wont-create-with-this-setting", "analysis_config" : { @@ -92,7 +92,7 @@ - do: catch: /The job is configured with fields \[model_snapshot_id\] that are illegal to set at job creation/ ml.validate: - body: > + body: > { "model_snapshot_id": "wont-create-with-this-setting", "analysis_config" : { @@ -109,7 +109,7 @@ - do: catch: /illegal_argument_exception.*Duplicate detectors are not allowed/ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", @@ -126,7 +126,7 @@ - do: catch: /illegal_argument_exception.*Duplicate detectors are not allowed/ ml.validate: - body: > + body: > { "analysis_config": { "bucket_span": "1h", diff --git a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/NdJsonTextStructureFinder.java b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/NdJsonTextStructureFinder.java index 2b8f3c678e9d5..e43b2cfdc96d3 100644 --- a/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/NdJsonTextStructureFinder.java +++ b/x-pack/plugin/text-structure/src/main/java/org/elasticsearch/xpack/textstructure/structurefinder/NdJsonTextStructureFinder.java @@ -44,9 +44,10 @@ static NdJsonTextStructureFinder makeNdJsonTextStructureFinder( List sampleMessages = Arrays.asList(sample.split("\n")); for (String sampleMessage : sampleMessages) { - XContentParser parser = jsonXContent.createParser(XContentParserConfiguration.EMPTY, sampleMessage); - sampleRecords.add(parser.mapOrdered()); - timeoutChecker.check("NDJSON parsing"); + try (XContentParser parser = jsonXContent.createParser(XContentParserConfiguration.EMPTY, sampleMessage)) { + sampleRecords.add(parser.mapOrdered()); + timeoutChecker.check("NDJSON parsing"); + } } TextStructure.Builder structureBuilder = new TextStructure.Builder(TextStructure.Format.NDJSON).setCharset(charsetName) diff --git a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle index ef34db62e5e03..8f129789d46b7 100644 --- a/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle +++ b/x-pack/plugin/transform/qa/multi-cluster-tests-with-security/build.gradle @@ -2,6 +2,7 @@ import org.elasticsearch.gradle.internal.test.RestIntegTestTask import org.elasticsearch.gradle.Version import org.elasticsearch.gradle.VersionProperties import static org.elasticsearch.gradle.PropertyNormalization.IGNORE_VALUE +import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-testclusters' apply plugin: 'elasticsearch.standalone-rest-test' @@ -53,16 +54,29 @@ testClusters.register('mixed-cluster') { tasks.register('remote-cluster', RestIntegTestTask) { mustRunAfter("precommit") systemProperty 'tests.rest.suite', 'remote_cluster' + maybeDisableForFips(it) } 
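+// These integration tasks each call maybeDisableForFips (defined at the end of this file),
+// which disables the task on a FIPS JVM when the project version is before 8.14.0.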
 tasks.register('mixed-cluster', RestIntegTestTask) {
   dependsOn 'remote-cluster'
   useCluster remoteCluster
   systemProperty 'tests.rest.suite', 'multi_cluster'
+  maybeDisableForFips(it)
 }
 
 tasks.register("integTest") {
   dependsOn 'mixed-cluster'
+  maybeDisableForFips(it)
 }
 
 tasks.named("check").configure { dependsOn("integTest") }
+
+// TODO: remove in 8.14. A new FIPS setting was added in 8.13; since FIPS configures all test
+// clusters and this integTest runs against the previous minor version, that setting is not
+// available when running in FIPS until 8.14.
+def maybeDisableForFips(task) {
+  if (BuildParams.inFipsJvm) {
+    if (Version.fromString(project.version).before(Version.fromString('8.14.0'))) {
+      task.enabled = false
+    }
+  }
+}
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/DefaultTransformExtension.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/DefaultTransformExtension.java
index 9cccbade339dc..ea9260f555905 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/DefaultTransformExtension.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/DefaultTransformExtension.java
@@ -9,9 +9,12 @@
 
 import org.elasticsearch.cluster.metadata.IndexMetadata;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
 
 public class DefaultTransformExtension implements TransformExtension {
 
+    private static final TimeValue MIN_FREQUENCY = TimeValue.timeValueSeconds(1);
+
     @Override
     public boolean includeNodeInfo() {
         return true;
@@ -33,4 +36,9 @@ public Settings getTransformDestinationIndexSettings() {
             .put(IndexMetadata.SETTING_AUTO_EXPAND_REPLICAS, "0-1")
             .build();
     }
+
+    @Override
+    public TimeValue getMinFrequency() {
+        return MIN_FREQUENCY;
+    }
 }
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
index 61cc0e2c072ad..98c95c5a9803a 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/Transform.java
@@ -243,7 +243,12 @@ public Collection<?> createComponents(PluginServices services) {
             configManager,
             auditor
         );
-        TransformScheduler scheduler = new TransformScheduler(clock, services.threadPool(), settings);
+        TransformScheduler scheduler = new TransformScheduler(
+            clock,
+            services.threadPool(),
+            settings,
+            getTransformExtension().getMinFrequency()
+        );
         scheduler.start();
 
         transformServices.set(new TransformServices(configManager, checkpointService, auditor, scheduler));
diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformExtension.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformExtension.java
index c919f4dd4c550..4794f3c86f259 100644
--- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformExtension.java
+++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/TransformExtension.java
@@ -8,6 +8,7 @@
 package org.elasticsearch.xpack.transform;
 
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.core.TimeValue;
 
 public interface TransformExtension {
 
@@ -20,4 +21,10 @@ public interface TransformExtension {
      * source settings.
*/ Settings getTransformDestinationIndexSettings(); + + // TODO(jkuipers): remove this default implementation after the ServerlessTransformPlugin + // in the elasticsearch-serverless project is updated. + default TimeValue getMinFrequency() { + return TimeValue.timeValueSeconds(1); + } } diff --git a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java index 29be02b87cbdf..1b8d14c6cdc2f 100644 --- a/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java +++ b/x-pack/plugin/transform/src/main/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexer.java @@ -519,16 +519,23 @@ private void injectPointInTimeIfNeeded( void doSearch(Tuple namedSearchRequest, ActionListener listener) { String name = namedSearchRequest.v1(); - SearchRequest searchRequest = namedSearchRequest.v2(); + SearchRequest originalRequest = namedSearchRequest.v2(); // We want to treat a request to search 0 indices as a request to do nothing, not a request to search all indices - if (searchRequest.indices().length == 0) { - logger.debug("[{}] Search request [{}] optimized to noop; searchRequest [{}]", getJobId(), name, searchRequest); + if (originalRequest.indices().length == 0) { + logger.debug("[{}] Search request [{}] optimized to noop; searchRequest [{}]", getJobId(), name, originalRequest); listener.onResponse(null); return; } - logger.trace("searchRequest: [{}]", searchRequest); - PointInTimeBuilder pit = searchRequest.pointInTimeBuilder(); + final SearchRequest searchRequest; + PointInTimeBuilder pit = originalRequest.pointInTimeBuilder(); + if (pit != null) { + // remove the indices from the request, they will be derived from the provided pit + searchRequest = new SearchRequest(originalRequest).indices(new String[0]).indicesOptions(SearchRequest.DEFAULT_INDICES_OPTIONS); + } else { + searchRequest = originalRequest; + } + logger.trace("searchRequest: [{}]", searchRequest); ClientHelper.executeWithHeadersAsync( transformConfig.getHeaders(), @@ -555,13 +562,13 @@ void doSearch(Tuple namedSearchRequest, ActionListener namedSearchRequest, ActionListener new TransformScheduledTask( task.getTransformId(), - task.getFrequency(), + getFrequency(task.getFrequency()), task.getLastTriggeredTimeMillis(), failureCount, task.getListener() @@ -245,7 +249,7 @@ public void scheduleNow(String transformId) { transformId, task -> new TransformScheduledTask( task.getTransformId(), - task.getFrequency(), + getFrequency(task.getFrequency()), task.getLastTriggeredTimeMillis(), task.getFailureCount(), currentTimeMillis, // we schedule this task at current clock time so that it is processed ASAP @@ -273,4 +277,11 @@ public void deregisterTransform(String transformId) { List getTransformScheduledTasks() { return scheduledTasks.listScheduledTasks(); } + + private TimeValue getFrequency(TimeValue frequency) { + if (frequency == null) { + frequency = Transform.DEFAULT_TRANSFORM_FREQUENCY; + } + return frequency.compareTo(minFrequency) >= 0 ? 
frequency : minFrequency; + } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java index bed646b9ddeb2..0a1179e4224aa 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/checkpoint/TimeBasedCheckpointProviderTests.java @@ -13,7 +13,6 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.TransportSearchAction; import org.elasticsearch.client.internal.Client; @@ -339,15 +338,13 @@ public SingleGroupSource get() { private static SearchResponse newSearchResponse(long totalHits) { return new SearchResponse( - new SearchResponseSections( - new SearchHits(SearchHits.EMPTY, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 0), - null, - null, - false, - false, - null, - 0 - ), + new SearchHits(SearchHits.EMPTY, new TotalHits(totalHits, TotalHits.Relation.EQUAL_TO), 0), + null, + null, + false, + false, + null, + 0, null, 1, 1, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java index 06de37af346d2..b1c9edc0fab0a 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/ClientTransformIndexerTests.java @@ -23,6 +23,7 @@ import org.elasticsearch.action.support.ActionTestUtils; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.IndexNotFoundException; import org.elasticsearch.search.SearchContextMissingException; @@ -30,7 +31,6 @@ import org.elasticsearch.search.SearchHits; import org.elasticsearch.search.builder.PointInTimeBuilder; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.internal.ShardSearchContextId; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; @@ -59,7 +59,6 @@ import java.time.Clock; import java.time.Instant; -import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.concurrent.CountDownLatch; @@ -135,7 +134,7 @@ public void testPitInjection() throws InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -229,7 +228,7 @@ public void testPitInjectionIfPitNotSupported() throws 
InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -306,7 +305,7 @@ public void testDisablePit() throws InterruptedException { mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -370,7 +369,7 @@ public void testDisablePitWhenThereIsRemoteIndexInSource() throws InterruptedExc mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), @@ -420,8 +419,9 @@ public void testHandlePitIndexNotFound() throws InterruptedException { try (var threadPool = createThreadPool()) { final var client = new PitMockClient(threadPool, true); ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456"))); - SearchRequest searchRequest = new SearchRequest("deleted-index"); - searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id")); + SearchRequest searchRequest = new SearchRequest("deleted-index").source( + new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id_on_deleted_index")) + ); Tuple namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest); this.assertAsync(listener -> indexer.doSearch(namedSearchRequest, listener), response -> { // if the pit got deleted, we know it retried @@ -433,8 +433,9 @@ public void testHandlePitIndexNotFound() throws InterruptedException { try (var threadPool = createThreadPool()) { final var client = new PitMockClient(threadPool, true); ClientTransformIndexer indexer = createTestIndexer(new ParentTaskAssigningClient(client, new TaskId("dummy-node:123456"))); - SearchRequest searchRequest = new SearchRequest("essential-deleted-index"); - searchRequest.source().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id")); + SearchRequest searchRequest = new SearchRequest("essential-deleted-index").source( + new SearchSourceBuilder().pointInTimeBuilder(new PointInTimeBuilder("the_pit_id_essential-deleted-index")) + ); Tuple namedSearchRequest = new Tuple<>("test-handle-pit-index-not-found", searchRequest); indexer.doSearch(namedSearchRequest, ActionListener.wrap(r -> fail("expected a failure, got response"), e -> { assertTrue(e instanceof IndexNotFoundException); @@ -521,14 +522,16 @@ protected void listener.onResponse((Response) response); return; } else if (request instanceof SearchRequest searchRequest) { - // if pit is used and deleted-index is given throw index not found - if (searchRequest.pointInTimeBuilder() != null && Arrays.binarySearch(searchRequest.indices(), "deleted-index") >= 0) { + if (searchRequest.pointInTimeBuilder() != null + && 
searchRequest.pointInTimeBuilder().getEncodedId().equals("the_pit_id_on_deleted_index")) { listener.onFailure(new IndexNotFoundException("deleted-index")); return; } - if (Arrays.binarySearch(searchRequest.indices(), "essential-deleted-index") >= 0) { + if ((searchRequest.pointInTimeBuilder() != null + && searchRequest.pointInTimeBuilder().getEncodedId().equals("the_pit_id_essential-deleted-index")) + || (searchRequest.indices().length > 0 && searchRequest.indices()[0].equals("essential-deleted-index"))) { listener.onFailure(new IndexNotFoundException("essential-deleted-index")); return; } @@ -539,16 +542,14 @@ protected void listener.onFailure(new SearchContextMissingException(new ShardSearchContextId("sc_missing", 42))); } else { SearchResponse response = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, null, 1, 1, @@ -564,7 +565,6 @@ protected void } return; } - super.doExecute(action, request, listener); } } @@ -599,7 +599,7 @@ private ClientTransformIndexer createTestIndexer(ParentTaskAssigningClient clien mock(IndexBasedTransformConfigManager.class), mock(TransformCheckpointService.class), mock(TransformAuditor.class), - new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), mock(ThreadPool.class), Settings.EMPTY, TimeValue.ZERO) ), mock(CheckpointProvider.class), new AtomicReference<>(IndexerState.STOPPED), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java index d3be18a193415..5c6539d0a5045 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureHandlingTests.java @@ -30,7 +30,6 @@ import org.elasticsearch.script.ScriptException; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; @@ -129,7 +128,7 @@ static class MockedTransformIndexer extends ClientTransformIndexer { transformsConfigManager, mock(TransformCheckpointService.class), auditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ), checkpointProvider, initialState, @@ -224,16 +223,14 @@ protected void onAbort() { void doGetInitialProgress(SearchRequest request, ActionListener responseListener) { responseListener.onResponse( new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), - 
// Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -375,16 +372,14 @@ public void testDoProcessAggNullCheck() { null ); SearchResponse searchResponse = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -513,16 +508,14 @@ public void testRetentionPolicyDeleteByQueryThrowsIrrecoverable() throws Excepti ); final SearchResponse searchResponse = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -605,16 +598,14 @@ public void testRetentionPolicyDeleteByQueryThrowsTemporaryProblem() throws Exce ); final SearchResponse searchResponse = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -700,16 +691,14 @@ public void testFailureCounterIsResetOnSuccess() throws Exception { ); final SearchResponse searchResponse = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java 
b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java index 55ae653c39629..750e535c4330f 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerFailureOnStatePersistenceTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.client.internal.ParentTaskAssigningClient; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.index.engine.VersionConflictEngineException; import org.elasticsearch.index.shard.ShardId; @@ -217,7 +218,7 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), @@ -299,7 +300,7 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), @@ -430,7 +431,7 @@ public void fail(Throwable exception, String failureMessage, ActionListener(IndexerState.STOPPED), diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java index 638a66fa3fb0d..9e72a92da5bee 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerStateTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; @@ -79,16 +78,14 @@ public class TransformIndexerStateTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -805,7 +802,7 @@ private MockedTransformIndexer createMockIndexer( transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ); MockedTransformIndexer indexer = new MockedTransformIndexer( @@ -839,7 +836,7 @@ private MockedTransformIndexerForStatePersistenceTesting createMockIndexerForSta transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), threadPool, 
Settings.EMPTY, TimeValue.ZERO) ); MockedTransformIndexerForStatePersistenceTesting indexer = new MockedTransformIndexerForStatePersistenceTesting( diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java index 6406308312f04..372aef3d0eea7 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformIndexerTests.java @@ -26,7 +26,6 @@ import org.elasticsearch.index.reindex.DeleteByQueryRequest; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.search.profile.SearchProfileResults; import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; @@ -75,16 +74,14 @@ public class TransformIndexerTests extends ESTestCase { private static final SearchResponse ONE_HIT_SEARCH_RESPONSE = new SearchResponse( - new InternalSearchResponse( - new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), - // Simulate completely null aggs - null, - new Suggest(Collections.emptyList()), - new SearchProfileResults(Collections.emptyMap()), - false, - false, - 1 - ), + new SearchHits(new SearchHit[] { new SearchHit(1) }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1.0f), + // Simulate completely null aggs + null, + new Suggest(Collections.emptyList()), + false, + false, + new SearchProfileResults(Collections.emptyMap()), + 1, "", 1, 1, @@ -451,7 +448,7 @@ private MockedTransformIndexer createMockIndexer( transformConfigManager, mock(TransformCheckpointService.class), transformAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ); MockedTransformIndexer indexer = new MockedTransformIndexer( diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java index b1582970d4e07..69d81c85a62d3 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformPersistentTasksExecutorTests.java @@ -24,6 +24,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.settings.ClusterSettings; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.index.shard.ShardId; @@ -444,7 +445,7 @@ private TransformPersistentTasksExecutor buildTaskExecutor() { transformsConfigManager, transformCheckpointService, mockAuditor, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY) + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO) ); ClusterSettings cSettings = new ClusterSettings(Settings.EMPTY, Collections.singleton(Transform.NUM_FAILURE_RETRIES_SETTING)); diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java index cda258c6daa81..12af48faf8e38 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/TransformTaskTests.java @@ -112,7 +112,7 @@ public void testStopOnFailedTaskWithStoppedIndexer() { transformsConfigManager, transformsCheckpointService, auditor, - new TransformScheduler(clock, threadPool, Settings.EMPTY) + new TransformScheduler(clock, threadPool, Settings.EMPTY, TimeValue.ZERO) ); TransformState transformState = new TransformState( @@ -134,7 +134,7 @@ public void testStopOnFailedTaskWithStoppedIndexer() { TaskId.EMPTY_TASK_ID, createTransformTaskParams(transformConfig.getId()), transformState, - new TransformScheduler(clock, threadPool, Settings.EMPTY), + new TransformScheduler(clock, threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, Collections.emptyMap() @@ -212,7 +212,7 @@ public void testStopOnFailedTaskWithoutIndexer() { TaskId.EMPTY_TASK_ID, createTransformTaskParams(transformConfig.getId()), transformState, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY), + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, Collections.emptyMap() @@ -431,7 +431,7 @@ public void testApplyNewAuthState() { TaskId.EMPTY_TASK_ID, createTransformTaskParams(transformConfig.getId()), transformState, - new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY), + new TransformScheduler(Clock.systemUTC(), threadPool, Settings.EMPTY, TimeValue.ZERO), auditor, threadPool, Collections.emptyMap() diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java index 9b8cf9745c558..708cb3d93cbed 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/CompositeBucketsChangeCollectorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.transform.transforms.pivot; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.TermsQueryBuilder; @@ -112,8 +111,22 @@ public void testTermsFieldCollector() throws IOException { }); Aggregations aggs = new Aggregations(Collections.singletonList(composite)); - SearchResponseSections sections = new SearchResponseSections(null, aggs, null, false, null, null, 1); - SearchResponse response = new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); + SearchResponse response = new SearchResponse( + null, + aggs, + null, + false, + null, + null, + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null + ); try { collector.processSearchResponse(response); diff --git 
a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java index d43b4bd672a07..dab6d8518d28f 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/DateHistogramFieldCollectorTests.java @@ -8,7 +8,6 @@ package org.elasticsearch.xpack.transform.transforms.pivot; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.RangeQueryBuilder; @@ -171,16 +170,22 @@ private static QueryBuilder buildFilterQuery(ChangeCollector collector) { } private static SearchResponse buildSearchResponse(SingleValue minTimestamp, SingleValue maxTimestamp) { - SearchResponseSections sections = new SearchResponseSections( + return new SearchResponse( null, new Aggregations(Arrays.asList(minTimestamp, maxTimestamp)), null, false, null, null, - 1 + 1, + null, + 1, + 1, + 0, + 0, + ShardSearchFailure.EMPTY_ARRAY, + null ); - return new SearchResponse(sections, null, 1, 1, 0, 0, ShardSearchFailure.EMPTY_ARRAY, null); } } diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java index 37bee4a4eb999..66e7efe764732 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/pivot/PivotTests.java @@ -14,7 +14,6 @@ import org.elasticsearch.action.ActionType; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.aggregations.AggregationsPlugin; import org.elasticsearch.client.internal.Client; @@ -327,9 +326,7 @@ public void testPreviewForCompositeAggregation() throws Exception { } private static SearchResponse searchResponseFromAggs(Aggregations aggs) { - SearchResponseSections sections = new SearchResponseSections(null, aggs, null, false, null, null, 1); - SearchResponse searchResponse = new SearchResponse(sections, null, 10, 5, 0, 0, new ShardSearchFailure[0], null); - return searchResponse; + return new SearchResponse(null, aggs, null, false, null, null, 1, null, 10, 5, 0, 0, new ShardSearchFailure[0], null); } private class MyMockClient extends NoOpClient { @@ -359,17 +356,14 @@ protected void } } - final SearchResponseSections sections = new SearchResponseSections( + final SearchResponse response = new SearchResponse( new SearchHits(new SearchHit[0], new TotalHits(0L, TotalHits.Relation.EQUAL_TO), 0), null, null, false, null, null, - 1 - ); - final SearchResponse response = new SearchResponse( - sections, + 1, null, 10, searchFailures.size() > 0 ? 
0 : 5, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java index 4748189745f1b..5030d42f9c17c 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskQueueTests.java @@ -9,6 +9,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; +import org.elasticsearch.xpack.transform.Transform; import org.elasticsearch.xpack.transform.transforms.scheduling.TransformScheduler.Event; import org.hamcrest.Matchers; import org.junit.After; @@ -197,7 +198,7 @@ public void testUpdatePriority() { private static TransformScheduledTask createTask(String transformId, long nextScheduledTimeMillis) { return new TransformScheduledTask( transformId, - null, + Transform.DEFAULT_SCHEDULER_FREQUENCY, null, 0, nextScheduledTimeMillis, diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskTests.java index fd8a1de429c14..5d2efdd23a0af 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformScheduledTaskTests.java @@ -32,11 +32,6 @@ public void testBasics() { assertThat(task.getListener(), is(equalTo(LISTENER))); } - public void testDefaultFrequency() { - TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, null, LAST_TRIGGERED_TIME_MILLIS, 0, 0, LISTENER); - assertThat(task.getFrequency(), is(equalTo(DEFAULT_FREQUENCY))); - } - public void testNextScheduledTimeMillis() { { TransformScheduledTask task = new TransformScheduledTask(TRANSFORM_ID, FREQUENCY, LAST_TRIGGERED_TIME_MILLIS, 0, 123, LISTENER); diff --git a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java index 7125b4074bc4a..8d3220a5b4de3 100644 --- a/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java +++ b/x-pack/plugin/transform/src/test/java/org/elasticsearch/xpack/transform/transforms/scheduling/TransformSchedulerTests.java @@ -61,49 +61,59 @@ public void shutdownThreadPool() { } public void testScheduling() { + testScheduling(5, 0); + } + + public void testScheduling_withMinFrequency() { + testScheduling(1, 5); + } + + // Note: frequencySeconds and minFrequencySeconds together should lead to an expected frequency of 5 seconds. 
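+    // The scheduler keeps the larger of the configured frequency and the minimum (see the
+    // getFrequency helper in TransformScheduler), so 1s with a 5s minimum and 5s with a 0s
+    // minimum both produce the same five-second schedule.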
+    private void testScheduling(int frequencySeconds, int minFrequencySeconds) {
         String transformId = "test-with-fake-clock";
-        int frequencySeconds = 5;
         TimeValue frequency = TimeValue.timeValueSeconds(frequencySeconds);
+        TimeValue minFrequency = TimeValue.timeValueSeconds(minFrequencySeconds);
+        TimeValue fiveSeconds = TimeValue.timeValueSeconds(5);
         TransformTaskParams transformTaskParams = new TransformTaskParams(transformId, TransformConfigVersion.CURRENT, frequency, false);
         FakeClock clock = new FakeClock(Instant.ofEpochMilli(0));
         CopyOnWriteArrayList<TransformScheduler.Event> events = new CopyOnWriteArrayList<>();
         TransformScheduler.Listener listener = events::add;
-        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS);
+        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, minFrequency);
         transformScheduler.registerTransform(transformTaskParams, listener);
         assertThat(
             transformScheduler.getTransformScheduledTasks(),
-            contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 5000, listener))
+            contains(new TransformScheduledTask(transformId, fiveSeconds, 0L, 0, 5000, listener))
         );
         assertThat(events, hasSize(1));
-        for (int i = 0; i < frequencySeconds; ++i) {
+        for (int i = 0; i < 5; ++i) {
             transformScheduler.processScheduledTasks();
             assertThat(
                 transformScheduler.getTransformScheduledTasks(),
-                contains(new TransformScheduledTask(transformId, frequency, 0L, 0, 5000, listener))
+                contains(new TransformScheduledTask(transformId, fiveSeconds, 0L, 0, 5000, listener))
             );
             assertThat(events, hasSize(1));
             clock.advanceTimeBy(Duration.ofMillis(1001));
         }
         assertThat(clock.instant(), is(equalTo(Instant.ofEpochMilli(5005))));
-        for (int i = 0; i < frequencySeconds; ++i) {
+        for (int i = 0; i < 5; ++i) {
             transformScheduler.processScheduledTasks();
             assertThat(
                 transformScheduler.getTransformScheduledTasks(),
-                contains(new TransformScheduledTask(transformId, frequency, 5005L, 0, 10005, listener))
+                contains(new TransformScheduledTask(transformId, fiveSeconds, 5005L, 0, 10005, listener))
             );
             assertThat(events, hasSize(2));
             clock.advanceTimeBy(Duration.ofMillis(1001));
         }
         assertThat(clock.instant(), is(equalTo(Instant.ofEpochMilli(10010))));
-        for (int i = 0; i < frequencySeconds; ++i) {
+        for (int i = 0; i < 5; ++i) {
             transformScheduler.processScheduledTasks();
             assertThat(
                 transformScheduler.getTransformScheduledTasks(),
-                contains(new TransformScheduledTask(transformId, frequency, 10010L, 0, 15010, listener))
+                contains(new TransformScheduledTask(transformId, fiveSeconds, 10010L, 0, 15010, listener))
             );
             assertThat(events, hasSize(3));
             clock.advanceTimeBy(Duration.ofMillis(1001));
@@ -128,7 +138,7 @@ public void testSchedulingWithFailures() {
         CopyOnWriteArrayList<TransformScheduler.Event> events = new CopyOnWriteArrayList<>();
         TransformScheduler.Listener listener = events::add;
 
-        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS);
+        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO);
         transformScheduler.registerTransform(transformTaskParams, listener);
         assertThat(
             transformScheduler.getTransformScheduledTasks(),
@@ -180,7 +190,7 @@ public void testScheduleNow() {
         CopyOnWriteArrayList<TransformScheduler.Event> events = new CopyOnWriteArrayList<>();
         TransformScheduler.Listener listener = events::add;
 
-        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS);
+        TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO);
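+        // TimeValue.ZERO effectively disables the minimum-frequency clamp, so the
+        // pre-existing scheduler tests keep their original timings.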
transformScheduler.registerTransform(transformTaskParams, listener); assertThat( transformScheduler.getTransformScheduledTasks(), @@ -230,7 +240,7 @@ public void testConcurrentProcessing() throws Exception { CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); TransformScheduler.Listener listener = events::add; - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); transformScheduler.registerTransform(transformTaskParams, listener); assertThat( transformScheduler.getTransformScheduledTasks(), @@ -267,7 +277,7 @@ public void testConcurrentModifications() { FakeClock clock = new FakeClock(Instant.ofEpochMilli(0)); CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); TransformScheduler.Listener taskModifyingListener = new TransformScheduler.Listener() { private boolean firstTime = true; @@ -309,7 +319,7 @@ public void testSchedulingWithSystemClock() throws Exception { Clock clock = Clock.systemUTC(); CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); transformScheduler.start(); transformScheduler.registerTransform(transformTaskParams, events::add); assertThat(events, hasSize(1)); @@ -334,7 +344,7 @@ public void testScheduleNowWithSystemClock() throws Exception { Clock clock = Clock.systemUTC(); CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); transformScheduler.start(); transformScheduler.registerTransform(transformTaskParams, events::add); assertThat(events, hasSize(1)); @@ -391,7 +401,7 @@ public void testRegisterMultipleTransforms() { CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); TransformScheduler.Listener listener = events::add; - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); @@ -421,7 +431,7 @@ public void testMultipleTransformsEligibleForProcessingAtOnce() { CopyOnWriteArrayList events = new CopyOnWriteArrayList<>(); TransformScheduler.Listener listener = events::add; - TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS); + TransformScheduler transformScheduler = new TransformScheduler(clock, threadPool, SETTINGS, TimeValue.ZERO); transformScheduler.registerTransform(transformTaskParams1, listener); transformScheduler.registerTransform(transformTaskParams2, listener); transformScheduler.registerTransform(transformTaskParams3, listener); diff --git a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java 
b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java index f3332cb50e27b..63850e11ae64b 100644 --- a/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java +++ b/x-pack/plugin/vector-tile/src/main/java/org/elasticsearch/xpack/vectortile/rest/RestVectorTileAction.java @@ -11,7 +11,6 @@ import org.elasticsearch.action.search.SearchRequestBuilder; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.client.internal.node.NodeClient; import org.elasticsearch.common.document.DocumentField; import org.elasticsearch.common.geo.GeoBoundingBox; @@ -147,21 +146,14 @@ public RestResponse buildResponse(SearchResponse searchResponse) throws Exceptio .collect(Collectors.toList()) ); final SearchResponse meta = new SearchResponse( - new SearchResponseSections( - new SearchHits( - SearchHits.EMPTY, - searchResponse.getHits().getTotalHits(), - searchResponse.getHits().getMaxScore() - ), // remove actual hits - aggsWithoutGridAndBounds, - searchResponse.getSuggest(), - searchResponse.isTimedOut(), - searchResponse.isTerminatedEarly(), - searchResponse.getProfileResults() == null - ? null - : new SearchProfileResults(searchResponse.getProfileResults()), - searchResponse.getNumReducePhases() - ), + // remove actual hits + new SearchHits(SearchHits.EMPTY, searchResponse.getHits().getTotalHits(), searchResponse.getHits().getMaxScore()), + aggsWithoutGridAndBounds, + searchResponse.getSuggest(), + searchResponse.isTimedOut(), + searchResponse.isTerminatedEarly(), + searchResponse.getProfileResults() == null ? null : new SearchProfileResults(searchResponse.getProfileResults()), + searchResponse.getNumReducePhases(), searchResponse.getScrollId(), searchResponse.getTotalShards(), searchResponse.getSuccessfulShards(), diff --git a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java index dbb7b7d93c2e3..f02b3f865adf0 100644 --- a/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java +++ b/x-pack/plugin/watcher/src/internalClusterTest/java/org/elasticsearch/xpack/watcher/condition/CompareConditionSearchTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.search.aggregations.AggregationBuilders; import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xpack.core.watcher.execution.WatchExecutionContext; import org.elasticsearch.xpack.core.watcher.watch.Payload; @@ -105,17 +104,14 @@ public void testExecuteAccessHits() throws Exception { hit.score(1f); hit.shard(new SearchShardTarget("a", new ShardId("a", "indexUUID", 0), null)); - InternalSearchResponse internalSearchResponse = new InternalSearchResponse( + SearchResponse response = new SearchResponse( new SearchHits(new SearchHit[] { hit }, new TotalHits(1L, TotalHits.Relation.EQUAL_TO), 1f), null, null, - null, false, false, - 1 - ); - SearchResponse response = new SearchResponse( - internalSearchResponse, + null, + 1, "", 3, 3, diff --git 
a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java index 1dede3f4e135c..c2ed68d8fa1bd 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherServiceTests.java @@ -16,7 +16,6 @@ import org.elasticsearch.action.search.ClearScrollResponse; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.action.search.SearchScrollRequest; import org.elasticsearch.action.search.ShardSearchFailure; import org.elasticsearch.action.search.TransportClearScrollAction; @@ -43,6 +42,7 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; @@ -172,29 +172,21 @@ void stopExecutor() {} return null; }).when(client).execute(eq(RefreshAction.INSTANCE), any(RefreshRequest.class), anyActionListener()); - // empty scroll response, no further scrolling needed - SearchResponseSections scrollSearchSections = new SearchResponseSections( - SearchHits.EMPTY_WITH_TOTAL_HITS, - null, - null, - false, - false, - null, - 1 - ); - SearchResponse scrollSearchResponse = new SearchResponse( - scrollSearchSections, - "scrollId", - 1, - 1, - 0, - 10, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse(scrollSearchResponse); + // empty scroll response, no further scrolling needed + ActionListener.respondAndRelease( + listener, + SearchResponseUtils.emptyWithTotalHits( + "scrollId", + 1, + 1, + 0, + 10, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) + ); return null; }).when(client).execute(eq(TransportSearchScrollAction.TYPE), any(SearchScrollRequest.class), anyActionListener()); @@ -221,20 +213,27 @@ void stopExecutor() {} when(parser.parse(eq(id), eq(true), any(), eq(XContentType.JSON), anyLong(), anyLong())).thenReturn(watch); } SearchHits searchHits = new SearchHits(hits, new TotalHits(count, TotalHits.Relation.EQUAL_TO), 1.0f); - SearchResponseSections sections = new SearchResponseSections(searchHits, null, null, false, false, null, 1); - SearchResponse searchResponse = new SearchResponse( - sections, - "scrollId", - 1, - 1, - 0, - 10, - ShardSearchFailure.EMPTY_ARRAY, - SearchResponse.Clusters.EMPTY - ); doAnswer(invocation -> { ActionListener listener = (ActionListener) invocation.getArguments()[2]; - listener.onResponse(searchResponse); + ActionListener.respondAndRelease( + listener, + new SearchResponse( + searchHits, + null, + null, + false, + false, + null, + 1, + "scrollId", + 1, + 1, + 0, + 10, + ShardSearchFailure.EMPTY_ARRAY, + SearchResponse.Clusters.EMPTY + ) + ); return null; }).when(client).execute(eq(TransportSearchAction.TYPE), any(SearchRequest.class), anyActionListener()); diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java 
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java index 89ddb2c0011bb..fa0dc89fd5106 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/condition/ScriptConditionTests.java @@ -21,7 +21,7 @@ import org.elasticsearch.script.ScriptMetadata; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.search.internal.InternalSearchResponse; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xcontent.ToXContent; import org.elasticsearch.xcontent.XContentBuilder; @@ -95,8 +95,7 @@ public void init() throws IOException { public void testExecute() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("ctx.payload.hits.total.value > 1"), scriptService); - SearchResponse response = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse response = SearchResponseUtils.emptyWithTotalHits( "", 3, 3, @@ -121,8 +120,7 @@ public void testExecuteMergedParams() throws Exception { singletonMap("threshold", 1) ); ScriptCondition executable = new ScriptCondition(script, scriptService); - SearchResponse response = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse response = SearchResponseUtils.emptyWithTotalHits( "", 3, 3, @@ -147,8 +145,7 @@ public void testParserValid() throws Exception { parser.nextToken(); ExecutableCondition executable = ScriptCondition.parse(scriptService, "_watch", parser); - SearchResponse response = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse response = SearchResponseUtils.emptyWithTotalHits( "", 3, 3, @@ -223,8 +220,7 @@ public void testScriptConditionParser_badLang() throws Exception { public void testScriptConditionThrowException() throws Exception { ScriptCondition condition = new ScriptCondition(mockScript("null.foo"), scriptService); - SearchResponse response = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse response = SearchResponseUtils.emptyWithTotalHits( "", 3, 3, @@ -247,8 +243,7 @@ public void testScriptConditionAccessCtx() throws Exception { mockScript("ctx.trigger.scheduled_time.toInstant().toEpochMill() < new Date().time"), scriptService ); - SearchResponse response = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse response = SearchResponseUtils.emptyWithTotalHits( "", 3, 3, diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java index 0f47df9dff12b..d25cc7168ec75 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/TriggeredWatchStoreTests.java @@ -46,8 +46,8 @@ import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.SearchShardTarget; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import 
org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.ToXContent; @@ -231,26 +231,8 @@ public void testFindTriggeredWatchesGoodCase() { hit.shard(new SearchShardTarget("_node_id", new ShardId(index, 0), null)); hit.sourceRef(source); hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1.0f); - SearchResponse searchResponse2 = new SearchResponse( - new InternalSearchResponse(hits, null, null, null, false, null, 1), - "_scrollId1", - 1, - 1, - 0, - 1, - null, - null - ); - SearchResponse searchResponse3 = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, - "_scrollId2", - 1, - 1, - 0, - 1, - null, - null - ); + SearchResponse searchResponse2 = new SearchResponse(hits, null, null, false, null, null, 1, "_scrollId1", 1, 1, 0, 1, null, null); + SearchResponse searchResponse3 = SearchResponseUtils.emptyWithTotalHits("_scrollId2", 1, 1, 0, 1, null, null); doAnswer(invocation -> { SearchScrollRequest request = (SearchScrollRequest) invocation.getArguments()[1]; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java index d06ee606f31ce..172338d60bbe1 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/test/integration/SearchInputTests.java @@ -22,8 +22,8 @@ import org.elasticsearch.script.ScriptEngine; import org.elasticsearch.script.ScriptService; import org.elasticsearch.search.SearchModule; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.search.builder.SearchSourceBuilder; -import org.elasticsearch.search.internal.InternalSearchResponse; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xcontent.NamedXContentRegistry; @@ -91,8 +91,7 @@ public void setup() { public void testExecute() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = new PlainActionFuture<>(); - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( "", 1, 1, @@ -132,8 +131,7 @@ public void testExecute() throws Exception { public void testDifferentSearchType() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = new PlainActionFuture<>(); - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( "", 1, 1, @@ -187,8 +185,7 @@ public void testParserValid() throws Exception { public void testThatEmptyRequestBodyWorks() throws Exception { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(SearchRequest.class); PlainActionFuture searchFuture = new PlainActionFuture<>(); - SearchResponse searchResponse = new SearchResponse( - InternalSearchResponse.EMPTY_WITH_TOTAL_HITS, + SearchResponse searchResponse = SearchResponseUtils.emptyWithTotalHits( "", 1, 1, diff --git a/x-pack/qa/oidc-op-tests/build.gradle b/x-pack/qa/oidc-op-tests/build.gradle index 3987073b1e6ad..8f46613d5d9f0 100644 --- a/x-pack/qa/oidc-op-tests/build.gradle +++ 
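Taken together, the watcher test hunks above all apply one recipe: the removed SearchResponseSections/InternalSearchResponse plumbing becomes either SearchResponseUtils.emptyWithTotalHits or the new flat SearchResponse constructor, and mocked transport actions now hand the response to the listener through ActionListener.respondAndRelease so the ref-counted response is released once the listener has consumed it. A minimal sketch of that stubbing pattern, assuming client is a Mockito mock and anyActionListener() is the helper already used in these tests:

    // Sketch: stub a scroll search on a mocked client with an empty, ref-counted response.
    doAnswer(invocation -> {
        @SuppressWarnings("unchecked")
        ActionListener<SearchResponse> listener = (ActionListener<SearchResponse>) invocation.getArguments()[2];
        // respondAndRelease decrements the response's ref count after delivery, which is
        // what keeps these tests from tripping the resource leak detector.
        ActionListener.respondAndRelease(
            listener,
            SearchResponseUtils.emptyWithTotalHits("scrollId", 1, 1, 0, 10, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY)
        );
        return null;
    }).when(client).execute(eq(TransportSearchScrollAction.TYPE), any(SearchScrollRequest.class), anyActionListener());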
b/x-pack/qa/oidc-op-tests/build.gradle @@ -1,16 +1,13 @@ import org.elasticsearch.gradle.internal.info.BuildParams apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' dependencies { javaRestTestImplementation(testArtifact(project(xpackModule('core')))) javaRestTestImplementation(testArtifact(project(xpackModule('security')))) + javaRestTestImplementation project(":x-pack:test:idp-fixture") } -testFixtures.useFixture ":x-pack:test:idp-fixture", "http-proxy" -testFixtures.useFixture ":x-pack:test:idp-fixture", "oidc-provider" - tasks.named('javaRestTest') { usesDefaultDistribution() // test suite uses jks which is not supported in fips mode diff --git a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtWithOidcAuthIT.java b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtWithOidcAuthIT.java index 2d3fc611758b0..18224c887c663 100644 --- a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtWithOidcAuthIT.java +++ b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/jwt/JwtWithOidcAuthIT.java @@ -106,7 +106,7 @@ public void testAuthenticateWithOidcIssuedJwt() throws Exception { new Scope(OIDCScopeValue.OPENID), new ClientID(clientId), new URI(redirectUri) - ).endpointURI(new URI(C2ID_AUTH_ENDPOINT)).state(new State(state)).nonce(new Nonce(nonce)).build(); + ).endpointURI(new URI(c2id.getC2OPUrl() + "/c2id-login")).state(new State(state)).nonce(new Nonce(nonce)).build(); final String implicitFlowURI = authenticateAtOP(oidcAuthRequest.toURI()); diff --git a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/C2IdOpTestCase.java b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/C2IdOpTestCase.java index 106e1d27910f2..56d3bbe77c78a 100644 --- a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/C2IdOpTestCase.java +++ b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/C2IdOpTestCase.java @@ -10,6 +10,8 @@ import net.minidev.json.JSONObject; import net.minidev.json.parser.JSONParser; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.apache.http.HttpHost; import org.apache.http.HttpResponse; import org.apache.http.StatusLine; @@ -38,11 +40,17 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.idp.HttpProxyTestContainer; +import org.elasticsearch.test.fixtures.idp.OidcProviderTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xpack.core.common.socket.SocketAccess; import org.hamcrest.Matchers; import org.junit.BeforeClass; import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.testcontainers.containers.Network; import java.io.FileNotFoundException; import java.io.IOException; @@ -56,25 +64,21 @@ import static org.hamcrest.Matchers.equalTo; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public abstract class C2IdOpTestCase extends ESRestTestCase { protected static final String TEST_SUBJECT_ID = "alice"; - // URLs for 
accessing the C2id OP - private static final String C2OP_PORT = getEphemeralTcpPortFromProperty("oidc-provider", "8080"); - private static final String C2ID_HOST = "http://127.0.0.1:" + C2OP_PORT; - protected static final String C2ID_ISSUER = C2ID_HOST + "/c2id"; - private static final String PROXY_PORT = getEphemeralTcpPortFromProperty("http-proxy", "8888"); - private static final String C2ID_LOGIN_API = C2ID_HOST + "/c2id-login/api/"; - private static final String C2ID_REGISTRATION_URL = C2ID_HOST + "/c2id/clients"; - protected static final String C2ID_AUTH_ENDPOINT = C2ID_HOST + "/c2id-login"; - // SHA256 of this is defined in x-pack/test/idp-fixture/oidc/override.properties private static final String OP_API_BEARER_TOKEN = "811fa888f3e0fdc9e01d4201bfeee46a"; private static Path HTTP_TRUSTED_CERT; private static final String CLIENT_SECRET = "b07efb7a1cf6ec9462afe7b6d3ab55c6c7880262aa61ac28dded292aca47c9a2"; - @ClassRule + + private static Network network = Network.newNetwork(); + protected static OidcProviderTestContainer c2id = new OidcProviderTestContainer(network); + protected static HttpProxyTestContainer proxy = new HttpProxyTestContainer(network); + public static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .nodes(1) @@ -86,10 +90,10 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.file.file.order", "0") .setting("xpack.security.authc.realms.native.native.order", "1") .setting("xpack.security.authc.realms.oidc.c2id.order", "2") - .setting("xpack.security.authc.realms.oidc.c2id.op.issuer", C2ID_ISSUER) - .setting("xpack.security.authc.realms.oidc.c2id.op.authorization_endpoint", C2ID_HOST + "/c2id-login") - .setting("xpack.security.authc.realms.oidc.c2id.op.token_endpoint", C2ID_HOST + "/c2id/token") - .setting("xpack.security.authc.realms.oidc.c2id.op.userinfo_endpoint", C2ID_HOST + "/c2id/userinfo") + .setting("xpack.security.authc.realms.oidc.c2id.op.issuer", () -> c2id.getC2IssuerUrl()) + .setting("xpack.security.authc.realms.oidc.c2id.op.authorization_endpoint", () -> c2id.getC2OPUrl() + "/c2id-login") + .setting("xpack.security.authc.realms.oidc.c2id.op.token_endpoint", () -> c2id.getC2OPUrl() + "/c2id/token") + .setting("xpack.security.authc.realms.oidc.c2id.op.userinfo_endpoint", () -> c2id.getC2OPUrl() + "/c2id/userinfo") .setting("xpack.security.authc.realms.oidc.c2id.op.jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.oidc.c2id.rp.redirect_uri", "https://my.fantastic.rp/cb") .setting("xpack.security.authc.realms.oidc.c2id.rp.client_id", "https://my.elasticsearch.org/rp") @@ -99,10 +103,10 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.oidc.c2id.claims.mail", "email") .setting("xpack.security.authc.realms.oidc.c2id.claims.groups", "groups") .setting("xpack.security.authc.realms.oidc.c2id-implicit.order", "3") - .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.issuer", C2ID_ISSUER) - .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.authorization_endpoint", C2ID_HOST + "/c2id-login") - .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.token_endpoint", C2ID_HOST + "/c2id/token") - .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.userinfo_endpoint", C2ID_HOST + "/c2id/userinfo") + .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.issuer", () -> c2id.getC2IssuerUrl()) + 
.setting("xpack.security.authc.realms.oidc.c2id-implicit.op.authorization_endpoint", () -> c2id.getC2OPUrl() + "/c2id-login") + .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.token_endpoint", () -> c2id.getC2OPUrl() + "/c2id/token") + .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.userinfo_endpoint", () -> c2id.getC2OPUrl() + "/c2id/userinfo") .setting("xpack.security.authc.realms.oidc.c2id-implicit.op.jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.oidc.c2id-implicit.rp.redirect_uri", "https://my.fantastic.rp/cb") .setting("xpack.security.authc.realms.oidc.c2id-implicit.rp.client_id", "elasticsearch-rp") @@ -112,10 +116,10 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.oidc.c2id-implicit.claims.mail", "email") .setting("xpack.security.authc.realms.oidc.c2id-implicit.claims.groups", "groups") .setting("xpack.security.authc.realms.oidc.c2id-proxy.order", "4") - .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.issuer", C2ID_ISSUER) - .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.authorization_endpoint", C2ID_HOST + "/c2id-login") - .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.token_endpoint", C2ID_HOST + "/c2id/token") - .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.userinfo_endpoint", C2ID_HOST + "/c2id/userinfo") + .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.issuer", () -> c2id.getC2IssuerUrl()) + .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.authorization_endpoint", () -> c2id.getC2OPUrl() + "/c2id-login") + .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.token_endpoint", () -> c2id.getC2OPUrl() + "/c2id/token") + .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.userinfo_endpoint", () -> c2id.getC2OPUrl() + "/c2id/userinfo") .setting("xpack.security.authc.realms.oidc.c2id-proxy.op.jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.oidc.c2id-proxy.rp.redirect_uri", "https://my.fantastic.rp/cb") .setting("xpack.security.authc.realms.oidc.c2id-proxy.rp.client_id", "https://my.elasticsearch.org/rp") @@ -125,12 +129,12 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.oidc.c2id-proxy.claims.mail", "email") .setting("xpack.security.authc.realms.oidc.c2id-proxy.claims.groups", "groups") .setting("xpack.security.authc.realms.oidc.c2id-proxy.http.proxy.host", "127.0.0.1") - .setting("xpack.security.authc.realms.oidc.c2id-proxy.http.proxy.port", PROXY_PORT) + .setting("xpack.security.authc.realms.oidc.c2id-proxy.http.proxy.port", () -> proxy.getProxyPort().toString()) .setting("xpack.security.authc.realms.oidc.c2id-post.order", "5") - .setting("xpack.security.authc.realms.oidc.c2id-post.op.issuer", C2ID_ISSUER) - .setting("xpack.security.authc.realms.oidc.c2id-post.op.authorization_endpoint", C2ID_HOST + "/c2id-login") - .setting("xpack.security.authc.realms.oidc.c2id-post.op.token_endpoint", C2ID_HOST + "/c2id/token") - .setting("xpack.security.authc.realms.oidc.c2id-post.op.userinfo_endpoint", C2ID_HOST + "/c2id/userinfo") + .setting("xpack.security.authc.realms.oidc.c2id-post.op.issuer", () -> c2id.getC2IssuerUrl()) + .setting("xpack.security.authc.realms.oidc.c2id-post.op.authorization_endpoint", () -> c2id.getC2OPUrl() + "/c2id-login") + .setting("xpack.security.authc.realms.oidc.c2id-post.op.token_endpoint", () -> c2id.getC2OPUrl() + "/c2id/token") + .setting("xpack.security.authc.realms.oidc.c2id-post.op.userinfo_endpoint", () -> 
c2id.getC2OPUrl() + "/c2id/userinfo") .setting("xpack.security.authc.realms.oidc.c2id-post.op.jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.oidc.c2id-post.rp.redirect_uri", "https://my.fantastic.rp/cb") .setting("xpack.security.authc.realms.oidc.c2id-post.rp.client_id", "elasticsearch-post") @@ -141,10 +145,10 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.oidc.c2id-post.claims.mail", "email") .setting("xpack.security.authc.realms.oidc.c2id-post.claims.groups", "groups") .setting("xpack.security.authc.realms.oidc.c2id-jwt.order", "6") - .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.issuer", C2ID_ISSUER) - .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.authorization_endpoint", C2ID_HOST + "/c2id-login") - .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.token_endpoint", C2ID_HOST + "/c2id/token") - .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.userinfo_endpoint", C2ID_HOST + "/c2id/userinfo") + .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.issuer", () -> c2id.getC2IssuerUrl()) + .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.authorization_endpoint", () -> c2id.getC2OPUrl() + "/c2id-login") + .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.token_endpoint", () -> c2id.getC2OPUrl() + "/c2id/token") + .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.userinfo_endpoint", () -> c2id.getC2OPUrl() + "/c2id/userinfo") .setting("xpack.security.authc.realms.oidc.c2id-jwt.op.jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.oidc.c2id-jwt.rp.redirect_uri", "https://my.fantastic.rp/cb") .setting("xpack.security.authc.realms.oidc.c2id-jwt.rp.client_id", "elasticsearch-post-jwt") @@ -155,7 +159,7 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .setting("xpack.security.authc.realms.oidc.c2id-jwt.claims.mail", "email") .setting("xpack.security.authc.realms.oidc.c2id-jwt.claims.groups", "groups") .setting("xpack.security.authc.realms.jwt.op-jwt.order", "7") - .setting("xpack.security.authc.realms.jwt.op-jwt.allowed_issuer", C2ID_ISSUER) + .setting("xpack.security.authc.realms.jwt.op-jwt.allowed_issuer", () -> c2id.getC2IssuerUrl()) .setting("xpack.security.authc.realms.jwt.op-jwt.allowed_audiences", "elasticsearch-jwt1,elasticsearch-jwt2") .setting("xpack.security.authc.realms.jwt.op-jwt.pkc_jwkset_path", "op-jwks.json") .setting("xpack.security.authc.realms.jwt.op-jwt.claims.principal", "sub") @@ -174,6 +178,9 @@ public abstract class C2IdOpTestCase extends ESRestTestCase { .user("x_pack_rest_user", "x-pack-test-password", "superuser", false) .build(); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(network).around(c2id).around(proxy).around(cluster); + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -193,24 +200,17 @@ public static void readTrustedCert() throws Exception { HTTP_TRUSTED_CERT = PathUtils.get(resource.toURI()); } - protected static String getEphemeralTcpPortFromProperty(String service, String port) { - String key = "test.fixtures." + service + ".tcp." + port; - final String value = System.getProperty(key); - assertNotNull("Expected the actual value for port " + port + " to be in system property " + key, value); - return value; - } - /** * Register one or more OIDC clients on the C2id server. This should be done once (per client) only. * C2id server only supports dynamic registration, so we can't pre-seed its config with our client data. 
*/ protected static void registerClients(String... jsonBody) throws IOException { try (CloseableHttpClient httpClient = HttpClients.createDefault()) { + String c2idRegistrationUrl = c2id.getC2OPUrl() + "/c2id/clients"; final BasicHttpContext context = new BasicHttpContext(); - final List<HttpPost> requests = new ArrayList<>(jsonBody.length); for (String body : jsonBody) { - HttpPost httpPost = new HttpPost(C2ID_REGISTRATION_URL); + HttpPost httpPost = new HttpPost(c2idRegistrationUrl); httpPost.setEntity(new StringEntity(body, ContentType.APPLICATION_JSON)); httpPost.setHeader("Accept", "application/json"); httpPost.setHeader("Content-type", "application/json"); @@ -240,12 +240,13 @@ protected Settings restAdminSettings() { } protected String authenticateAtOP(URI opAuthUri) throws Exception { + String c2LoginApi = c2id.getC2OPUrl() + "/c2id-login/api/"; // C2ID doesn't have a non-JS login page :/, so use their API directly // see https://connect2id.com/products/server/docs/guides/login-page try (CloseableHttpClient httpClient = HttpClients.createDefault()) { final BasicHttpContext context = new BasicHttpContext(); // Initiate the authentication process - HttpPost httpPost = new HttpPost(C2ID_LOGIN_API + "initAuthRequest"); + HttpPost httpPost = new HttpPost(c2LoginApi + "initAuthRequest"); String initJson = Strings.format(""" {"qs":"%s"} """, opAuthUri.getRawQuery()); @@ -258,7 +259,7 @@ protected String authenticateAtOP(URI opAuthUri) throws Exception { final String sid = initResponse.getAsString("sid"); // Actually authenticate the user with ldapAuth HttpPost loginHttpPost = new HttpPost( - C2ID_LOGIN_API + "authenticateSubject?cacheBuster=" + randomAlphaOfLength(8) + "&authSessionId=" + sid + c2LoginApi + "authenticateSubject?cacheBuster=" + randomAlphaOfLength(8) + "&authSessionId=" + sid ); String loginJson = """ {"username":"alice","password":"secret"}"""; @@ -268,9 +269,7 @@ protected String authenticateAtOP(URI opAuthUri) throws Exception { return parseJsonResponse(response); }); - HttpPut consentHttpPut = new HttpPut( - C2ID_LOGIN_API + "updateAuthRequest" + "/" + sid + "?cacheBuster=" + randomAlphaOfLength(8) - ); + HttpPut consentHttpPut = new HttpPut(c2LoginApi + "updateAuthRequest" + "/" + sid + "?cacheBuster=" + randomAlphaOfLength(8)); String consentJson = """ {"claims":["name", "email"],"scope":["openid"]}"""; configureJsonRequest(consentHttpPut, consentJson); diff --git a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthIT.java b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthIT.java index 9302f731ff285..cd37d86626333 100644 --- a/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthIT.java +++ b/x-pack/qa/oidc-op-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/oidc/OpenIdConnectAuthIT.java @@ -182,7 +182,7 @@ private void verifyElasticsearchAccessTokenForCodeFlow(String accessToken) throw
assertThat(map.get("metadata"), instanceOf(Map.class)); final Map metadata = (Map) map.get("metadata"); assertThat(metadata.get("oidc(sub)"), equalTo("alice")); - assertThat(metadata.get("oidc(iss)"), equalTo(C2ID_ISSUER)); + assertThat(metadata.get("oidc(iss)"), equalTo(c2id.getC2IssuerUrl())); } private PrepareAuthResponse getRedirectedFromFacilitator(String realmName) throws Exception { diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 844f47c9a53f5..02b2abad3726f 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -1,18 +1,13 @@ apply plugin: 'elasticsearch.standalone-test' -apply plugin: 'elasticsearch.test.fixtures' dependencies { testImplementation(testArtifact(project(xpackModule('security')))) testImplementation(testArtifact(project(xpackModule('core')))) + testImplementation project(":x-pack:test:idp-fixture") + testImplementation "junit:junit:${versions.junit}" } -testFixtures.useFixture ":x-pack:test:idp-fixture", "openldap" - -Project idpFixtureProject = project(":x-pack:test:idp-fixture") -String outputDir = "${project.buildDir}/generated-resources/${project.name}" -def copyIdpTrust = tasks.register("copyIdpTrust", Copy) { - from idpFixtureProject.file('openldap/certs/ca.jks'); - from idpFixtureProject.file('openldap/certs/ca_server.pem'); - into outputDir +tasks.named('test') { + // test suite uses jks which is not supported in fips mode + systemProperty 'tests.security.manager', 'false' } -project.sourceSets.test.output.dir(outputDir, builtBy: copyIdpTrust) diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java index c1375823548df..7d1610d2ccc0f 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.test; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.unboundid.ldap.sdk.LDAPConnection; import com.unboundid.ldap.sdk.LDAPException; @@ -19,6 +20,8 @@ import org.elasticsearch.core.TimeValue; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; +import org.elasticsearch.test.fixtures.idp.OpenLdapTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.junit.annotations.Network; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; @@ -37,8 +40,8 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.junit.After; import org.junit.Before; +import org.junit.ClassRule; -import java.nio.file.Path; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; @@ -53,10 +56,9 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.startsWith; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class OpenLdapTests extends ESTestCase { - public static final String OPEN_LDAP_DNS_URL = "ldaps://localhost:" + getFromProperty("636"); - /** * * ip.es.io is magic that will resolve any IP-like DNS name into the embedded IP @@ -67,11 +69,12 @@ public class OpenLdapTests extends ESTestCase { * so in order to have a "not-valid-hostname" failure, we need a second * hostname that isn't in the certificate's Subj Alt 
Name list */ - private static final String OPEN_LDAP_ES_IO_URL = "ldaps://127.0.0.1.ip.es.io:" + getFromProperty("636"); + + @ClassRule + public static final OpenLdapTestContainer openLdap = new OpenLdapTestContainer(); public static final String PASSWORD = "NickFuryHeartsES"; private static final String HAWKEYE_DN = "uid=hawkeye,ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; - public static final String LDAPTRUST_PATH = "/ca.jks"; private static final SecureString PASSWORD_SECURE_STRING = new SecureString(PASSWORD.toCharArray()); public static final String REALM_NAME = "oldap-test"; @@ -96,7 +99,6 @@ public boolean enableWarningsCheck() { @Before public void initializeSslSocketFactory() throws Exception { - Path truststore = getDataPath(LDAPTRUST_PATH); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use an SSLContext, previously connected sessions can get re-established which breaks hostname @@ -105,14 +107,14 @@ public void initializeSslSocketFactory() throws Exception { MockSecureSettings mockSecureSettings = new MockSecureSettings(); Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); // fake realms so ssl will get loaded - builder.put("xpack.security.authc.realms.ldap.foo.ssl.truststore.path", truststore); + builder.put("xpack.security.authc.realms.ldap.foo.ssl.truststore.path", openLdap.getJavaKeyStorePath()); mockSecureSettings.setString("xpack.security.authc.realms.ldap.foo.ssl.truststore.secure_password", "changeit"); builder.put("xpack.security.authc.realms.ldap.foo.ssl.verification_mode", SslVerificationMode.FULL); - builder.put("xpack.security.authc.realms.ldap." + REALM_NAME + ".ssl.truststore.path", truststore); + builder.put("xpack.security.authc.realms.ldap." + REALM_NAME + ".ssl.truststore.path", openLdap.getJavaKeyStorePath()); mockSecureSettings.setString("xpack.security.authc.realms.ldap." + REALM_NAME + ".ssl.truststore.secure_password", "changeit"); builder.put("xpack.security.authc.realms.ldap." 
+ REALM_NAME + ".ssl.verification_mode", SslVerificationMode.CERTIFICATE); - builder.put("xpack.security.authc.realms.ldap.vmode_full.ssl.truststore.path", truststore); + builder.put("xpack.security.authc.realms.ldap.vmode_full.ssl.truststore.path", openLdap.getJavaKeyStorePath()); mockSecureSettings.setString("xpack.security.authc.realms.ldap.vmode_full.ssl.truststore.secure_password", "changeit"); builder.put("xpack.security.authc.realms.ldap.vmode_full.ssl.verification_mode", SslVerificationMode.FULL); globalSettings = builder.setSecureSettings(mockSecureSettings).build(); @@ -127,7 +129,7 @@ public void testConnect() throws Exception { final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier("ldap", "oldap-test"); RealmConfig config = new RealmConfig( realmId, - buildLdapSettings(realmId, OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL), + buildLdapSettings(realmId, openLdap.getLdapUrl(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL), TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY) ); @@ -150,7 +152,7 @@ public void testGroupSearchScopeBase() throws Exception { final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier("ldap", REALM_NAME); RealmConfig config = new RealmConfig( realmId, - buildLdapSettings(realmId, OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.BASE), + buildLdapSettings(realmId, openLdap.getLdapUrl(), userTemplate, groupSearchBase, LdapSearchScope.BASE), TestEnvironment.newEnvironment(globalSettings), new ThreadContext(Settings.EMPTY) ); @@ -169,7 +171,7 @@ public void testCustomFilter() throws Exception { String userTemplate = "uid={0},ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier("ldap", "oldap-test"); Settings settings = Settings.builder() - .put(buildLdapSettings(realmId, OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) + .put(buildLdapSettings(realmId, openLdap.getLdapUrl(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) .put(getFullSettingKey(realmId.getName(), SearchGroupsResolverSettings.FILTER), "(&(objectclass=posixGroup)(memberUid={0}))") .put(getFullSettingKey(realmId.getName(), SearchGroupsResolverSettings.USER_ATTRIBUTE), "uid") .build(); @@ -192,9 +194,10 @@ public void testStandardLdapConnectionHostnameVerificationFailure() throws Excep String groupSearchBase = "ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; String userTemplate = "uid={0},ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier("ldap", "vmode_full"); + String openLdapEsIoURL = "ldaps://127.0.0.1.ip.es.io:" + openLdap.getDefaultPort(); Settings settings = Settings.builder() // The certificate used in the vagrant box is valid for "localhost", but not for "*.ip.es.io" - .put(buildLdapSettings(realmId, OPEN_LDAP_ES_IO_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) + .put(buildLdapSettings(realmId, openLdapEsIoURL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) .build(); final Environment env = TestEnvironment.newEnvironment(globalSettings); RealmConfig config = new RealmConfig(realmId, settings, env, new ThreadContext(Settings.EMPTY)); @@ -220,7 +223,7 @@ public void testStandardLdapConnectionHostnameVerificationSuccess() throws Excep final RealmConfig.RealmIdentifier realmId = new RealmConfig.RealmIdentifier("ldap", "vmode_full"); 
Settings settings = Settings.builder() // The certificate used in the vagrant box is valid for "localhost" (but not for "*.ip.es.io") - .put(buildLdapSettings(realmId, OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) + .put(buildLdapSettings(realmId, openLdap.getLdapUrl(), userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL)) .build(); RealmConfig config = new RealmConfig( @@ -320,7 +323,7 @@ private Settings buildLdapSettings( Settings.Builder builder = Settings.builder() .put(LdapTestCase.buildLdapSettings(realmId, urls, templates, groupSearchBase, scope, null, false)); builder.put(getFullSettingKey(realmId.getName(), SearchGroupsResolverSettings.USER_ATTRIBUTE), "uid"); - return builder.put(SSLConfigurationSettings.TRUSTSTORE_PATH.realm(realmId).getKey(), getDataPath(LDAPTRUST_PATH)) + return builder.put(SSLConfigurationSettings.TRUSTSTORE_PATH.realm(realmId).getKey(), openLdap.getJavaKeyStorePath()) .put(SSLConfigurationSettings.LEGACY_TRUSTSTORE_PASSWORD.realm(realmId).getKey(), "changeit") .put(globalSettings) .put(getFullSettingKey(realmId, RealmSettings.ORDER_SETTING), 0) @@ -340,8 +343,7 @@ private List groups(LdapSession ldapSession) { } private LDAPConnection setupOpenLdapConnection() throws Exception { - Path truststore = getDataPath(LDAPTRUST_PATH); - return LdapTestUtils.openConnection(OpenLdapTests.OPEN_LDAP_DNS_URL, HAWKEYE_DN, OpenLdapTests.PASSWORD, truststore); + return LdapTestUtils.openConnection(openLdap.getLdapUrl(), HAWKEYE_DN, OpenLdapTests.PASSWORD, openLdap.getJavaKeyStorePath()); } private Map resolve(LDAPConnection connection, LdapMetadataResolver resolver) throws Exception { diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index eb3365010b550..60bff29bfb58d 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.MockSecureSettings; @@ -16,6 +18,8 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.OpenLdapTests; +import org.elasticsearch.test.fixtures.idp.OpenLdapTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.security.authc.RealmConfig; @@ -31,6 +35,7 @@ import org.elasticsearch.xpack.security.authc.ldap.support.SessionFactory; import org.junit.After; import org.junit.Before; +import org.junit.ClassRule; import java.nio.file.Path; import java.text.MessageFormat; @@ -44,15 +49,18 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { private Settings globalSettings; private ThreadPool threadPool; - 
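The OpenLDAP migration is the same move in miniature: the ephemeral port previously published through test.fixtures system properties and the ca.jks/ca_server.pem files previously copied into test resources by Gradle are now all read off the container rule. Roughly, in a hypothetical test class, assuming the container accessors shown in these diffs (getLdapUrl, getJavaKeyStorePath, getDefaultPort):

    // Sketch: a hypothetical test class wired directly to the OpenLDAP testcontainer.
    @ThreadLeakFilters(filters = { TestContainersThreadFilter.class })
    public class SomeOpenLdapTests extends ESTestCase {

        // The container starts once per class and maps its own ephemeral host port.
        @ClassRule
        public static final OpenLdapTestContainer openLdap = new OpenLdapTestContainer();

        public void testSketch() throws Exception {
            String url = openLdap.getLdapUrl();               // was "ldaps://localhost:" + getFromProperty("636")
            Path truststore = openLdap.getJavaKeyStorePath(); // was getDataPath("/ca.jks")
            // ... build realm settings from url and truststore exactly as the tests above do ...
        }
    }

The @ThreadLeakFilters annotation matters here: testcontainers keeps housekeeping threads alive that the randomized-testing thread-leak checker would otherwise report as failures.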
private static final String LDAPCACERT_PATH = "/ca_server.pem"; + + @ClassRule + public static final OpenLdapTestContainer openLdapContainer = new OpenLdapTestContainer(); @Before public void init() { - Path caPath = getDataPath(LDAPCACERT_PATH); + Path caPath = openLdapContainer.getCaCertPath(); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use an SSLContext, previously connected sessions can get re-established which breaks hostname @@ -79,7 +87,7 @@ public void testUserSearchWithBindUserOpenLDAP() throws Exception { .put( LdapTestCase.buildLdapSettings( realmId, - new String[] { OpenLdapTests.OPEN_LDAP_DNS_URL }, + new String[] { openLdapContainer.getLdapUrl() }, Strings.EMPTY_ARRAY, groupSearchBase, LdapSearchScope.ONE_LEVEL, diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java index feec06f4b3b6d..4cc12e5af448e 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java @@ -6,15 +6,21 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; import org.elasticsearch.test.OpenLdapTests; +import org.elasticsearch.test.fixtures.idp.OpenLdapTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.SearchGroupsResolverSettings; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.core.security.support.NoOpLogger; +import org.junit.ClassRule; +import java.nio.file.Path; import java.util.List; import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; @@ -24,11 +30,15 @@ import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class SearchGroupsResolverTests extends GroupsResolverTestCase { private static final String BRUCE_BANNER_DN = "uid=hulk,ou=people,dc=oldap,dc=test,dc=elasticsearch,dc=com"; private static final RealmConfig.RealmIdentifier REALM_ID = new RealmConfig.RealmIdentifier("ldap", "my-ldap-realm"); + @ClassRule + public static final OpenLdapTestContainer openLdapContainer = new OpenLdapTestContainer(); + public void testResolveSubTree() throws Exception { Settings settings = Settings.builder() .put(getFullSettingKey(REALM_ID, SearchGroupsResolverSettings.BASE_DN), "dc=oldap,dc=test,dc=elasticsearch,dc=com") @@ -202,7 +212,7 @@ public void testReadBinaryUserAttribute() throws Exception { @Override protected String ldapUrl() { - return OpenLdapTests.OPEN_LDAP_DNS_URL; + return openLdapContainer.getLdapUrl(); } @Override @@ -217,6 +227,12 @@ protected String bindPassword() { @Override protected String trustPath() { - return "/ca.jks"; + return "/openldap/certs/ca.jks"; + } + + @Override + protected void doSetupLdapConnection() throws Exception { + Path 
trustPath = openLdapContainer.getJavaKeyStorePath(); + this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), trustPath); } } diff --git a/x-pack/qa/password-protected-keystore/src/javaRestTest/java/org/elasticsearch/password_protected_keystore/ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT.java b/x-pack/qa/password-protected-keystore/src/javaRestTest/java/org/elasticsearch/password_protected_keystore/ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT.java index 49950b553bb20..0625ec166e32c 100644 --- a/x-pack/qa/password-protected-keystore/src/javaRestTest/java/org/elasticsearch/password_protected_keystore/ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT.java +++ b/x-pack/qa/password-protected-keystore/src/javaRestTest/java/org/elasticsearch/password_protected_keystore/ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT.java @@ -37,12 +37,12 @@ public class ReloadSecureSettingsWithPasswordProtectedKeystoreRestIT extends ESR .nodes(NUM_NODES) .keystorePassword(KEYSTORE_PASSWORD) .name("javaRestTest") + .keystore(nodeSpec -> Map.of("xpack.security.transport.ssl.secure_key_passphrase", "transport-password")) .setting("xpack.security.enabled", "true") .setting("xpack.security.authc.anonymous.roles", "anonymous") .setting("xpack.security.transport.ssl.enabled", "true") .setting("xpack.security.transport.ssl.certificate", "transport.crt") .setting("xpack.security.transport.ssl.key", "transport.key") - .setting("xpack.security.transport.ssl.key_passphrase", "transport-password") .setting("xpack.security.transport.ssl.certificate_authorities", "ca.crt") .rolesFile(Resource.fromClasspath("roles.yml")) .configFile("transport.key", Resource.fromClasspath("ssl/transport.key")) diff --git a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java index 703b9e608db17..27250dd4e3367 100644 --- a/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade-multi-cluster/src/test/java/org/elasticsearch/upgrades/CcrRollingUpgradeIT.java @@ -206,7 +206,6 @@ public void testAutoFollowing() throws Exception { } } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103094") public void testCannotFollowLeaderInUpgradedCluster() throws Exception { if (upgradeState != UpgradeState.ALL) { return; diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java index 128fd8b47722f..c7c51a2a96c87 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/AbstractUpgradeTestCase.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.upgrades; +import org.elasticsearch.Build; import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; @@ -38,6 +39,15 @@ protected static boolean isOriginalCluster(String clusterVersion) { return UPGRADE_FROM_VERSION.equals(clusterVersion); } + /** + * Upgrade tests by design are also executed with the same version. We might want to skip some checks if that's the case, see + * for example gh#39102. + * @return true if the cluster version is the current version. 
+ */ + protected static boolean isOriginalClusterCurrent() { + return UPGRADE_FROM_VERSION.equals(Build.current().version()); + } + @Deprecated(forRemoval = true) @UpdateForV9 // Tests should be reworked to rely on features from the current cluster (old, mixed or upgraded). diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ILMHistoryManagedTemplateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ILMHistoryManagedTemplateUpgradeIT.java new file mode 100644 index 0000000000000..aa177474b81e8 --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/ILMHistoryManagedTemplateUpgradeIT.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ +package org.elasticsearch.upgrades; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.rest.ObjectPath; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; + +public class ILMHistoryManagedTemplateUpgradeIT extends AbstractUpgradeTestCase { + + @SuppressWarnings("unchecked") + public void testEnsureHistoryManagedTemplateIsInstalledOnUpgradedVersion() throws Exception { + if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) { + assertBusy(() -> { + Request request = new Request("GET", "/_index_template/ilm-history-7"); + try { + Response response = client().performRequest(request); + Map<String, Object> responseMap = entityAsMap(response); + assertNotNull(responseMap); + + List<Map<String, Object>> indexTemplates = (List<Map<String, Object>>) responseMap.get("index_templates"); + assertThat(indexTemplates.size(), is(1)); + assertThat(ObjectPath.evaluate(indexTemplates.get(0), "name"), is("ilm-history-7")); + assertThat(ObjectPath.evaluate(indexTemplates.get(0), "index_template.index_patterns"), is(List.of("ilm-history-7*"))); + } catch (ResponseException e) { + // Not found is fine + assertThat( + "Unexpected failure getting templates: " + e.getResponse().getStatusLine(), + e.getResponse().getStatusLine().getStatusCode(), + is(404) + ); + } + }, 30, TimeUnit.SECONDS); + } + } +} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java index 657a51dfe1b95..6d34ef5887629 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlAssignmentPlannerUpgradeIT.java @@ -16,6 +16,7 @@ import org.elasticsearch.core.UpdateForV9; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.io.IOException; import java.util.ArrayList; @@ -86,7 +87,7 @@ public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (isOriginalClusterVersionAtLeast(Version.V_8_11_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { assertNewMemoryFormat("new_memory_format"); } else { assertOldMemoryFormat("new_memory_format"); @@ -102,7 +103,7 @@
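This new ILM history test, and its SLM counterpart below, share one skeleton: poll with assertBusy, because the template registry installs the managed template asynchronously after the upgrade. Distilled, with the ILM names as the example:

    // Sketch: poll for a managed index template once the cluster is fully upgraded.
    assertBusy(() -> {
        Request request = new Request("GET", "/_index_template/ilm-history-7");
        try {
            Response response = client().performRequest(request);
            // Template present: assert on its name and index patterns, as above.
        } catch (ResponseException e) {
            // A 404 (not installed) is tolerated; any other status fails the assertion,
            // and assertBusy retries failed assertions until the 30-second timeout.
            assertThat(e.getResponse().getStatusLine().getStatusCode(), is(404));
        }
    }, 30, TimeUnit.SECONDS);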
public void testMlAssignmentPlannerUpgrade() throws Exception { // assert correct memory format is used assertOldMemoryFormat("old_memory_format"); - if (isOriginalClusterVersionAtLeast(Version.V_8_11_0)) { + if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) { assertNewMemoryFormat("new_memory_format"); } else { assertOldMemoryFormat("new_memory_format"); @@ -141,7 +142,7 @@ private void waitForDeploymentStarted(String modelId) throws Exception { @SuppressWarnings("unchecked") private void assertOldMemoryFormat(String modelId) throws Exception { // There was a change in the MEMORY_OVERHEAD value in 8.3.0, see #86416 - long memoryOverheadMb = Version.fromString(UPGRADE_FROM_VERSION).onOrAfter(Version.V_8_2_1) ? 240 : 270; + long memoryOverheadMb = clusterHasFeature(RestTestLegacyFeatures.ML_MEMORY_OVERHEAD_FIXED) ? 240 : 270; var response = getTrainedModelStats(modelId); Map<String, Object> map = entityAsMap(response); List<Map<String, Object>> stats = (List<Map<String, Object>>) map.get("trained_model_stats"); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java index 9913c40dac411..6c1b2be05fd5f 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/MlJobSnapshotUpgradeIT.java @@ -7,7 +7,6 @@ package org.elasticsearch.upgrades; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -76,10 +75,7 @@ public void testSnapshotUpgrader() throws Exception { switch (CLUSTER_TYPE) { case OLD -> createJobAndSnapshots(); case MIXED -> { - assumeTrue( - "We should only test if old cluster is before new cluster", - isOriginalClusterVersionAtLeast(Version.CURRENT) == false - ); + assumeTrue("We should only test if old cluster is before new cluster", isOriginalClusterCurrent() == false); ensureHealth((request -> { request.addParameter("timeout", "70s"); request.addParameter("wait_for_nodes", "3"); @@ -88,10 +84,7 @@ public void testSnapshotUpgrader() throws Exception { testSnapshotUpgradeFailsOnMixedCluster(); } case UPGRADED -> { - assumeTrue( - "We should only test if old cluster is before new cluster", - isOriginalClusterVersionAtLeast(Version.CURRENT) == false - ); + assumeTrue("We should only test if old cluster is before new cluster", isOriginalClusterCurrent() == false); ensureHealth((request -> { request.addParameter("timeout", "70s"); request.addParameter("wait_for_nodes", "3"); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SLMHistoryManagedTemplateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SLMHistoryManagedTemplateUpgradeIT.java new file mode 100644 index 0000000000000..fed42c35cf5ce --- /dev/null +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/SLMHistoryManagedTemplateUpgradeIT.java @@ -0,0 +1,47 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
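The ML hunks above show the substitution that runs through all of these upgrade tests: comparisons against parsed Version constants become cluster feature checks, which keep working once node version strings are no longer guaranteed to be semantic, and Version.CURRENT comparisons become the isOriginalClusterCurrent() helper introduced earlier. The shape of the change, using the lines above:

    // Before: version arithmetic on the version we upgraded from.
    //   if (isOriginalClusterVersionAtLeast(Version.V_8_11_0)) { ... }
    //   assumeTrue("...", isOriginalClusterVersionAtLeast(Version.CURRENT) == false);

    // After: ask for the capability, or for "same version" directly.
    if (clusterHasFeature(RestTestLegacyFeatures.ML_NEW_MEMORY_FORMAT)) {
        assertNewMemoryFormat("new_memory_format");
    } else {
        assertOldMemoryFormat("new_memory_format");
    }
    assumeTrue("We should only test if old cluster is before new cluster", isOriginalClusterCurrent() == false);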
+ */ +package org.elasticsearch.upgrades; + +import org.elasticsearch.client.Request; +import org.elasticsearch.client.Response; +import org.elasticsearch.client.ResponseException; +import org.elasticsearch.test.rest.ObjectPath; + +import java.util.List; +import java.util.Map; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.Matchers.is; + +public class SLMHistoryManagedTemplateUpgradeIT extends AbstractUpgradeTestCase { + + @SuppressWarnings("unchecked") + public void testEnsureHistoryManagedTemplateIsInstalledOnUpgradedVersion() throws Exception { + if (CLUSTER_TYPE.equals(ClusterType.UPGRADED)) { + assertBusy(() -> { + Request request = new Request("GET", "/_index_template/.slm-history-7"); + try { + Response response = client().performRequest(request); + Map<String, Object> responseMap = entityAsMap(response); + assertNotNull(responseMap); + + List<Map<String, Object>> indexTemplates = (List<Map<String, Object>>) responseMap.get("index_templates"); + assertThat(indexTemplates.size(), is(1)); + assertThat(ObjectPath.evaluate(indexTemplates.get(0), "name"), is(".slm-history-7")); + assertThat(ObjectPath.evaluate(indexTemplates.get(0), "index_template.index_patterns"), is(List.of(".slm-history-7*"))); + } catch (ResponseException e) { + // Not found is fine + assertThat( + "Unexpected failure getting templates: " + e.getResponse().getStatusLine(), + e.getResponse().getStatusLine().getStatusCode(), + is(404) + ); + } + }, 30, TimeUnit.SECONDS); + } + } +} diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java index 8b2fe0d1e2af1..dddba9b7b0fba 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TokenBackwardsCompatibilityIT.java @@ -8,7 +8,6 @@ import org.apache.http.HttpHeaders; import org.apache.http.HttpHost; -import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -17,6 +16,7 @@ import org.elasticsearch.client.WarningsHandler; import org.elasticsearch.core.Strings; import org.elasticsearch.search.SearchHits; +import org.elasticsearch.search.SearchResponseUtils; import org.elasticsearch.test.rest.ObjectPath; import org.junit.After; import org.junit.Before; @@ -440,17 +440,22 @@ private List<String> getAllTokenIds() throws IOException { }"""); final Response searchResponse = client().performRequest(searchRequest); assertOK(searchResponse); - final SearchHits searchHits = SearchResponse.fromXContent(responseAsParser(searchResponse)).getHits(); - assertThat( - "Search request used with size parameter that was too small to fetch all tokens.", - searchHits.getTotalHits().value, - lessThanOrEqualTo(searchSize) - ); - final List<String> tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { - assertNotNull(searchHit.getId()); - return searchHit.getId(); - }).toList(); - assertThat(tokenIds, not(empty())); - return tokenIds; + var response = SearchResponseUtils.responseAsSearchResponse(searchResponse); + try { + final SearchHits searchHits = response.getHits(); + assertThat( + "Search request used with size parameter that was too small to fetch all tokens.", + searchHits.getTotalHits().value, + lessThanOrEqualTo(searchSize) + ); + final List<String> tokenIds = Arrays.stream(searchHits.getHits()).map(searchHit -> { +
assertNotNull(searchHit.getId()); + return searchHit.getId(); + }).toList(); + assertThat(tokenIds, not(empty())); + return tokenIds; + } finally { + response.decRef(); + } } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java index 78ee66fa4d327..87a9911bd80b6 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransformSurvivesUpgradeIT.java @@ -9,7 +9,6 @@ import org.apache.http.HttpHost; import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; -import org.elasticsearch.Build; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; @@ -235,7 +234,7 @@ private void verifyContinuousTransformHandlesData(long expectedLastCheckpoint) t private void verifyUpgradeFailsIfMixedCluster() { // upgrade tests by design are also executed with the same version, this check must be skipped in this case, see gh#39102. - if (isOriginalCluster(Build.current().version())) { + if (isOriginalClusterCurrent()) { return; } final Request upgradeTransformRequest = new Request("POST", getTransformEndpoint() + "_upgrade"); diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java index 3c073605969af..e864a579bd0b0 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/TransportVersionClusterStateUpgradeIT.java @@ -7,29 +7,26 @@ package org.elasticsearch.upgrades; +import org.elasticsearch.Build; import org.elasticsearch.TransportVersion; -import org.elasticsearch.TransportVersions; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.common.util.Maps; import org.elasticsearch.test.rest.ObjectPath; +import org.elasticsearch.test.rest.RestTestLegacyFeatures; import java.util.Map; +import static org.elasticsearch.cluster.ClusterState.INFERRED_TRANSPORT_VERSION; +import static org.elasticsearch.cluster.ClusterState.VERSION_INTRODUCING_TRANSPORT_VERSIONS; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.everyItem; import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.oneOf; public class TransportVersionClusterStateUpgradeIT extends AbstractUpgradeTestCase { - private static final Version VERSION_INTRODUCING_TRANSPORT_VERSIONS = Version.V_8_8_0; - private static final Version VERSION_INTRODUCING_NODES_VERSIONS = Version.V_8_11_0; - private static final TransportVersion FIRST_TRANSPORT_VERSION = TransportVersions.V_8_8_0; - public void testReadsInferredTransportVersions() throws Exception { - assertEquals(VERSION_INTRODUCING_TRANSPORT_VERSIONS.id(), FIRST_TRANSPORT_VERSION.id()); - // waitUntil because the versions fixup on upgrade happens in the background so may need a retry assertTrue(waitUntil(() -> { try { @@ -53,9 +50,9 @@ private boolean runTransportVersionsTest() throws Exception { final var description = clusterState.toString(); final var 
nodeIds = clusterState.evaluateMapKeys("nodes"); - final Map<String, Version> versionsByNodeId = Maps.newHashMapWithExpectedSize(nodeIds.size()); + final Map<String, String> versionsByNodeId = Maps.newHashMapWithExpectedSize(nodeIds.size()); for (final var nodeId : nodeIds) { - versionsByNodeId.put(nodeId, Version.fromString(clusterState.evaluate("nodes." + nodeId + ".version"))); + versionsByNodeId.put(nodeId, clusterState.evaluate("nodes." + nodeId + ".version")); } final var hasTransportVersions = clusterState.evaluate("transport_versions") != null; @@ -64,11 +61,11 @@ switch (CLUSTER_TYPE) { case OLD -> { - if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS) == false) { + if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { // Before 8.8.0 there was only DiscoveryNode#version assertFalse(description, hasTransportVersions); assertFalse(description, hasNodesVersions); - } else if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false) { + } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { // In [8.8.0, 8.11.0) we exposed just transport_versions assertTrue(description, hasTransportVersions); assertFalse(description, hasNodesVersions); @@ -79,10 +76,10 @@ } } case MIXED -> { - if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS) == false) { + if (clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) == false) { // Responding node might be <8.8.0 (so no extra versions) or >=8.11.0 (includes nodes_versions) assertFalse(description, hasTransportVersions); - } else if (isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false) { + } else if (clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false) { // Responding node might be in [8.8.0, 8.11.0) (transport_versions) or >=8.11.0 (includes nodes_versions) but not both assertTrue(description, hasNodesVersions || hasTransportVersions); } else { @@ -95,14 +92,14 @@ // All nodes are Version.CURRENT, ≥8.11.0, so we definitely have nodes_versions assertFalse(description, hasTransportVersions); assertTrue(description, hasNodesVersions); - assertThat(description, versionsByNodeId.values(), everyItem(equalTo(Version.CURRENT))); + assertThat(description, versionsByNodeId.values(), everyItem(equalTo(Build.current().version()))); } } if (hasTransportVersions) { // Upgrading from [8.8.0, 8.11.0) and the responding node is still on the old version - assertFalse(description, isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS)); - assertTrue(description, isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS)); + assertFalse(description, clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION)); + assertTrue(description, clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED)); assertNotEquals(description, ClusterType.UPGRADED, CLUSTER_TYPE); // transport_versions includes the correct version for all nodes, no inference is needed @@ -114,19 +111,19 @@ final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); final var nodeVersion = versionsByNodeId.get(nodeId);
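For the per-node assertions that follow, the rewritten test no longer tries to derive an exact transport version from a node version. It relies only on the invariant encoded by the constants now imported from ClusterState: any node that is not on the current build still reports a transport version at or past the 8.8.0 cutover (INFERRED_TRANSPORT_VERSION). Distilled:

    // Sketch: the invariant behind the assertions below.
    TransportVersion transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version"));
    String nodeVersion = versionsByNodeId.get(nodeId);
    if (nodeVersion.equals(Build.current().version())) {
        // A fully upgraded node must report the current transport version.
        assertEquals(TransportVersion.current(), transportVersion);
    } else {
        // Otherwise all we can say is that it is at or past the introduction point.
        assertThat(transportVersion, greaterThanOrEqualTo(INFERRED_TRANSPORT_VERSION));
    }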
assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Version.CURRENT)) { + if (nodeVersion.equals(Build.current().version())) { assertEquals(nodeDescription, TransportVersion.current(), transportVersion); - } else if (nodeVersion.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { - assertThat(nodeDescription, transportVersion, greaterThan(FIRST_TRANSPORT_VERSION)); } else { - assertEquals(nodeDescription, FIRST_TRANSPORT_VERSION, transportVersion); + // There's no relationship between node versions and transport versions anymore, although we can be sure of this: + assertThat(nodeDescription, transportVersion, greaterThanOrEqualTo(INFERRED_TRANSPORT_VERSION)); } } } else if (hasNodesVersions) { // Either upgrading from ≥8.11.0 (the responding node might be old or new), or from <8.8.0 (the responding node is new) assertFalse( description, - isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_NODES_VERSIONS) == false && CLUSTER_TYPE == ClusterType.OLD + clusterHasFeature(RestTestLegacyFeatures.STATE_REPLACED_TRANSPORT_VERSION_WITH_NODES_VERSION) == false + && CLUSTER_TYPE == ClusterType.OLD ); // nodes_versions includes _a_ version for all nodes; it might be correct, or it might be inferred if we're upgrading from @@ -139,27 +136,32 @@ private boolean runTransportVersionsTest() throws Exception { final var transportVersion = TransportVersion.fromString(clusterState.evaluate(path + ".transport_version")); final var nodeVersion = versionsByNodeId.get(nodeId); assertNotNull(nodeDescription, nodeVersion); - if (nodeVersion.equals(Version.CURRENT)) { + if (nodeVersion.equals(Build.current().version())) { // Either the responding node is upgraded or the upgrade is trivial; if the responding node is upgraded but the master // is not then its transport version may be temporarily inferred as 8.8.0 until TransportVersionsFixupListener runs. assertThat( nodeDescription, transportVersion, - isOriginalClusterVersionAtLeast(VERSION_INTRODUCING_TRANSPORT_VERSIONS) + clusterHasFeature(RestTestLegacyFeatures.TRANSPORT_VERSION_SUPPORTED) ? equalTo(TransportVersion.current()) - : oneOf(TransportVersion.current(), FIRST_TRANSPORT_VERSION) + : oneOf(TransportVersion.current(), INFERRED_TRANSPORT_VERSION) ); - if (CLUSTER_TYPE == ClusterType.UPGRADED && transportVersion.equals(FIRST_TRANSPORT_VERSION)) { + if (CLUSTER_TYPE == ClusterType.UPGRADED && transportVersion.equals(INFERRED_TRANSPORT_VERSION)) { // TransportVersionsFixupListener should run soon, retry logger.info("{} - not fixed up yet, retrying", nodeDescription); return false; } - } else if (nodeVersion.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)) { - // There's no relationship between node versions and transport versions any more, although we can be sure of this: - assertThat(nodeDescription, transportVersion, greaterThan(FIRST_TRANSPORT_VERSION)); } else { - // Responding node is not upgraded, and no later than 8.8.0, so we infer its version correctly. 
- assertEquals(nodeDescription, TransportVersion.fromId(nodeVersion.id()), transportVersion); + var version = parseLegacyVersion(nodeVersion); + // All non-semantic versions are after 8.8.0 and have transport version + var transportVersionIntroduced = version.map(v -> v.after(VERSION_INTRODUCING_TRANSPORT_VERSIONS)).orElse(true); + if (transportVersionIntroduced) { + // There's no relationship between node versions and transport versions anymore, although we can be sure of this: + assertThat(nodeDescription, transportVersion, greaterThan(INFERRED_TRANSPORT_VERSION)); + } else { + // Responding node is not upgraded, and no later than 8.8.0, so we infer its version correctly. + assertEquals(nodeDescription, TransportVersion.fromId(version.get().id()), transportVersion); + } } } } diff --git a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java index 59acb7722085f..d7d2676163851 100644 --- a/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java +++ b/x-pack/qa/rolling-upgrade/src/test/java/org/elasticsearch/upgrades/UpgradeClusterClientYamlTestSuiteIT.java @@ -10,7 +10,6 @@ import com.carrotsearch.randomizedtesting.annotations.TimeoutSuite; import org.apache.lucene.tests.util.TimeUnits; -import org.elasticsearch.Version; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.settings.Settings; @@ -28,6 +27,7 @@ import java.util.Map; import java.util.concurrent.TimeUnit; +import static org.elasticsearch.test.rest.RestTestLegacyFeatures.COMPONENT_TEMPLATE_SUPPORTED; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; @@ -43,7 +43,7 @@ public class UpgradeClusterClientYamlTestSuiteIT extends ESClientYamlSuiteTestCa public void waitForTemplates() throws Exception { if (AbstractUpgradeTestCase.CLUSTER_TYPE == AbstractUpgradeTestCase.ClusterType.OLD) { try { - boolean clusterUnderstandsComposableTemplates = AbstractUpgradeTestCase.isOriginalClusterVersionAtLeast(Version.V_7_8_0); + boolean clusterUnderstandsComposableTemplates = clusterHasFeature(COMPONENT_TEMPLATE_SUPPORTED); XPackRestTestHelper.waitForTemplates( client(), XPackRestTestConstants.ML_POST_V7120_TEMPLATES, diff --git a/x-pack/qa/saml-idp-tests/build.gradle b/x-pack/qa/saml-idp-tests/build.gradle index 6027a421b62f2..6a7d60f88a1d7 100644 --- a/x-pack/qa/saml-idp-tests/build.gradle +++ b/x-pack/qa/saml-idp-tests/build.gradle @@ -1,46 +1,13 @@ -import org.elasticsearch.gradle.LazyPropertyMap - -Project idpFixtureProject = project(':x-pack:test:idp-fixture') - apply plugin: 'elasticsearch.internal-java-rest-test' -apply plugin: 'elasticsearch.test.fixtures' dependencies { javaRestTestImplementation testArtifact(project(xpackModule('core'))) javaRestTestImplementation "com.google.jimfs:jimfs:${versions.jimfs}" javaRestTestImplementation "com.google.guava:guava:${versions.jimfs_guava}" + javaRestTestImplementation project(":x-pack:test:idp-fixture") } -testFixtures.useFixture ":x-pack:test:idp-fixture" - -String outputDir = "${project.buildDir}/generated-resources/${project.name}" -tasks.register("copyIdpFiles", Sync) { - dependsOn idpFixtureProject.postProcessFixture - // Don't attempt to get ephemeral ports when Docker is not available - onlyIf(idpFixtureProject.postProcessFixture.path + " not skipped") { - 
idpFixtureProject.postProcessFixture.state.skipped == false - } - from idpFixtureProject.files('idp/shibboleth-idp/credentials/idp-browser.pem', 'idp/shibboleth-idp/metadata/idp-metadata.xml', - 'idp/shibboleth-idp/credentials/sp-signing.key', 'idp/shibboleth-idp/credentials/sp-signing.crt'); - into outputDir - def expandProps = new LazyPropertyMap<>("lazy port config") - expandProps.put("port", () -> idpFixtureProject.postProcessFixture.ext."test.fixtures.shibboleth-idp.tcp.4443") - inputs.properties(expandProps) - filesMatching("idp-metadata.xml") { - expand(expandProps) - } -} - -normalization { - runtimeClasspath { - ignore 'idp-metadata.xml' - } -} tasks.named("javaRestTest").configure { usesDefaultDistribution() - classpath += files(tasks.named("copyIdpFiles")) - onlyIf(idpFixtureProject.postProcessFixture.path + " not skipped") { - idpFixtureProject.postProcessFixture.state.skipped == false - } } diff --git a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java index 625c0ffae167e..5718930f37c82 100644 --- a/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java +++ b/x-pack/qa/saml-idp-tests/src/javaRestTest/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticationIT.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.security.authc.saml; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; @@ -26,6 +28,7 @@ import org.apache.http.protocol.HttpContext; import org.apache.http.protocol.HttpCoreContext; import org.apache.http.util.EntityUtils; +import org.apache.lucene.tests.util.LuceneTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; @@ -42,6 +45,9 @@ import org.elasticsearch.test.cluster.ElasticsearchCluster; import org.elasticsearch.test.cluster.local.distribution.DistributionType; import org.elasticsearch.test.cluster.util.resource.Resource; +import org.elasticsearch.test.fixtures.idp.IdpTestContainer; +import org.elasticsearch.test.fixtures.idp.OpenLdapTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.test.rest.ESRestTestCase; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentFactory; @@ -52,6 +58,10 @@ import org.hamcrest.Matchers; import org.junit.Before; import org.junit.ClassRule; +import org.junit.rules.RuleChain; +import org.junit.rules.TestRule; +import org.testcontainers.containers.Network; +import org.testcontainers.shaded.org.apache.commons.io.IOUtils; import java.io.IOException; import java.io.InputStream; @@ -82,13 +92,18 @@ /** * An integration test for validating SAML authentication against a real Identity Provider (Shibboleth) */ +@LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/103717") +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class SamlAuthenticationIT extends ESRestTestCase { private static final String SAML_RESPONSE_FIELD = "SAMLResponse"; private static final String KIBANA_PASSWORD = "K1b@na K1b@na K1b@na"; - @ClassRule - public static ElasticsearchCluster cluster = ElasticsearchCluster.local() + private static 
Network network = Network.newNetwork(); + private static OpenLdapTestContainer openLdapTestContainer = new OpenLdapTestContainer(network); + private static IdpTestContainer idpFixture = new IdpTestContainer(network); + + private static ElasticsearchCluster cluster = ElasticsearchCluster.local() .distribution(DistributionType.DEFAULT) .setting("xpack.license.self_generated.type", "trial") .setting("xpack.security.enabled", "true") @@ -131,12 +146,25 @@ public class SamlAuthenticationIT extends ESRestTestCase { .setting("xpack.security.authc.realms.native.native.order", "4") .setting("xpack.ml.enabled", "false") .setting("logger.org.elasticsearch.xpack.security", "TRACE") - .configFile("sp-signing.key", Resource.fromClasspath("sp-signing.key")) - .configFile("idp-metadata.xml", Resource.fromClasspath("idp-metadata.xml")) - .configFile("sp-signing.crt", Resource.fromClasspath("sp-signing.crt")) + .configFile("sp-signing.key", Resource.fromClasspath("/idp/shibboleth-idp/credentials/sp-signing.key")) + .configFile("idp-metadata.xml", Resource.fromString(SamlAuthenticationIT::calculateIdpMetaData)) + .configFile("sp-signing.crt", Resource.fromClasspath("/idp/shibboleth-idp/credentials/sp-signing.crt")) .user("test_admin", "x-pack-test-password") .build(); + @ClassRule + public static TestRule ruleChain = RuleChain.outerRule(network).around(openLdapTestContainer).around(idpFixture).around(cluster); + + private static String calculateIdpMetaData() { + Resource resource = Resource.fromClasspath("/idp/shibboleth-idp/metadata/idp-metadata.xml"); + try (InputStream stream = resource.asStream()) { + String metadata = IOUtils.toString(stream, "UTF-8"); + return metadata.replace("${port}", String.valueOf(idpFixture.getDefaultPort())); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + @Override protected String getTestRestCluster() { return cluster.getHttpAddresses(); @@ -526,7 +554,7 @@ private CloseableHttpClient getHttpClient() throws Exception { } private SSLContext getClientSslContext() throws Exception { - final Path pem = getDataPath("/idp-browser.pem"); + final Path pem = idpFixture.getBrowserPem(); final X509ExtendedTrustManager trustManager = CertParsingUtils.getTrustManagerFromPEM(List.of(pem)); SSLContext context = SSLContext.getInstance("TLS"); context.init(new KeyManager[0], new TrustManager[] { trustManager }, new SecureRandom()); diff --git a/x-pack/qa/third-party/active-directory/build.gradle b/x-pack/qa/third-party/active-directory/build.gradle index f5c4e6d63d37c..5156d20dd1d12 100644 --- a/x-pack/qa/third-party/active-directory/build.gradle +++ b/x-pack/qa/third-party/active-directory/build.gradle @@ -1,12 +1,15 @@ apply plugin: 'elasticsearch.standalone-test' -apply plugin: 'elasticsearch.test.fixtures' +configurations.all { + exclude group: 'org.slf4j', module: 'slf4j-nop' +} dependencies { + testImplementation project(':test:framework') testImplementation project(xpackModule('core')) testImplementation project(xpackModule('security')) - testImplementation(testArtifact(project(xpackModule('security'))))} - -testFixtures.useFixture ":x-pack:test:smb-fixture" + testImplementation(testArtifact(project(xpackModule('security')))) + testImplementation project(":x-pack:test:smb-fixture") +} // add test resources from security, so tests can use example certs tasks.named("processTestResources").configure { @@ -23,6 +26,7 @@ tasks.named("forbiddenPatterns").configure { } tasks.named("test").configure { + systemProperty 'tests.security.manager', 'false' include 
'**/*IT.class' include '**/*Tests.class' } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java index 26e0121b92a7d..d2443720de5ce 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java @@ -63,7 +63,7 @@ public void testUserSearchWithActiveDirectory() throws Exception { String groupSearchBase = "DC=ad,DC=test,DC=elasticsearch,DC=com"; String userSearchBase = "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; Settings settings = Settings.builder() - .put("url", ActiveDirectorySessionFactoryTests.AD_LDAP_URL) + .put("url", smbFixture.getAdLdapUrl()) .put("group_search.base_dn", groupSearchBase) .put("user_search.base_dn", userSearchBase) .put("bind_dn", "ironman@ad.test.elasticsearch.com") diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java index 9ab6b5a309393..ff68d879d8a8f 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java @@ -6,6 +6,7 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.unboundid.ldap.sdk.LDAPConnection; import com.unboundid.ldap.sdk.LDAPConnectionPool; import com.unboundid.ldap.sdk.LDAPException; @@ -18,6 +19,8 @@ import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.fixtures.smb.SmbTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; @@ -25,6 +28,7 @@ import org.elasticsearch.xpack.core.ssl.SSLConfigurationSettings; import org.elasticsearch.xpack.core.ssl.SSLService; import org.junit.Before; +import org.junit.ClassRule; import java.io.IOException; import java.nio.file.FileVisitResult; @@ -39,8 +43,11 @@ import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { + @ClassRule + public static final SmbTestContainer smbFixture = new SmbTestContainer(); // follow referrals defaults to false here which differs from the default value of the setting // this is needed to prevent test logs being filled by errors as the default configuration of // the tests run against a vagrant samba4 instance configured as a domain controller with the @@ -48,14 +55,7 @@ public abstract class AbstractActiveDirectoryTestCase 
extends ESTestCase { // as we cannot control the URL of the referral which may contain a non-resolvable DNS name as // this name would be served by the samba4 instance public static final Boolean FOLLOW_REFERRALS = Booleans.parseBoolean(getFromEnv("TESTS_AD_FOLLOW_REFERRALS", "false")); - public static final String AD_LDAP_URL = getFromEnv("TESTS_AD_LDAP_URL", "ldaps://localhost:" + getFromProperty("636")); - public static final String AD_LDAP_GC_URL = getFromEnv("TESTS_AD_LDAP_GC_URL", "ldaps://localhost:" + getFromProperty("3269")); - public static final String PASSWORD = getFromEnv("TESTS_AD_USER_PASSWORD", "Passw0rd"); - public static final String AD_LDAP_PORT = getFromEnv("TESTS_AD_LDAP_PORT", getFromProperty("389")); - - public static final String AD_LDAPS_PORT = getFromEnv("TESTS_AD_LDAPS_PORT", getFromProperty("636")); - public static final String AD_GC_LDAP_PORT = getFromEnv("TESTS_AD_GC_LDAP_PORT", getFromProperty("3268")); - public static final String AD_GC_LDAPS_PORT = getFromEnv("TESTS_AD_GC_LDAPS_PORT", getFromProperty("3269")); + public static final String PASSWORD = "Passw0rd"; public static final String AD_DOMAIN = "ad.test.elasticsearch.com"; protected SSLService sslService; @@ -108,10 +108,6 @@ Settings buildAdSettings( .put(getFullSettingKey(realmId, ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING), adDomainName) .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_USER_SEARCH_BASEDN_SETTING), userSearchDN) .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_USER_SEARCH_SCOPE_SETTING), scope) - .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING), AD_LDAP_PORT) - .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_LDAPS_PORT_SETTING), AD_LDAPS_PORT) - .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_GC_LDAP_PORT_SETTING), AD_GC_LDAP_PORT) - .put(getFullSettingKey(realmName, ActiveDirectorySessionFactorySettings.AD_GC_LDAPS_PORT_SETTING), AD_GC_LDAPS_PORT) .put(getFullSettingKey(realmId, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS) .putList(getFullSettingKey(realmId, SSLConfigurationSettings.CAPATH_SETTING_REALM), certificatePaths); if (randomBoolean()) { @@ -153,11 +149,4 @@ private static String getFromEnv(String envVar, String defaultValue) { final String value = System.getenv(envVar); return value == null ? defaultValue : value; } - - private static String getFromProperty(String port) { - String key = "test.fixtures.smb-fixture.tcp." 
+ port; - final String value = System.getProperty(key); - assertNotNull("Expected the actual value for port " + port + " to be in system property " + key, value); - return value; - } } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 1af08ffd5fafe..3d9e7f3828bc7 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -6,6 +6,8 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; + import org.apache.logging.log4j.LogManager; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchSecurityException; @@ -21,18 +23,20 @@ import org.elasticsearch.core.Nullable; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; +import org.elasticsearch.test.fixtures.smb.SmbTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xcontent.XContentType; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.action.user.AuthenticateAction; import org.elasticsearch.xpack.core.security.action.user.AuthenticateRequest; import org.elasticsearch.xpack.core.security.action.user.AuthenticateResponse; -import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; +import org.junit.ClassRule; import java.io.IOException; import java.nio.file.Path; @@ -47,14 +51,9 @@ import java.util.stream.Collectors; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.elasticsearch.xpack.core.security.authc.RealmSettings.getFullSettingKey; import static org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope.ONE_LEVEL; import static org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope.SUB_TREE; import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.BASIC_AUTH_HEADER; -import static org.elasticsearch.xpack.security.authc.ldap.AbstractActiveDirectoryTestCase.AD_GC_LDAPS_PORT; -import static org.elasticsearch.xpack.security.authc.ldap.AbstractActiveDirectoryTestCase.AD_GC_LDAP_PORT; -import static org.elasticsearch.xpack.security.authc.ldap.AbstractActiveDirectoryTestCase.AD_LDAPS_PORT; -import static org.elasticsearch.xpack.security.authc.ldap.AbstractActiveDirectoryTestCase.AD_LDAP_PORT; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -63,6 +62,7 @@ * This test assumes all subclass tests will be of type SUITE. It picks a random realm configuration for the tests, and * writes a group to role mapping file for each node. 
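 * Each realm configuration resolves its Active Directory endpoint from the shared SMB testcontainer declared below via smbFixture.getAdLdapUrl().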
*/ +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase { public static final String XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL = "xpack.security.authc.realms.active_directory.external"; @@ -72,6 +72,9 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase public static final String PHILANTHROPISTS_INDEX = "philanthropists"; public static final String SECURITY_INDEX = "security"; + @ClassRule + public static final SmbTestContainer smbFixture = new SmbTestContainer(); + private static final RoleMappingEntry[] AD_ROLE_MAPPING = new RoleMappingEntry[] { new RoleMappingEntry("SHIELD: [ \"CN=SHIELD,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com\" ]", """ { @@ -359,12 +362,8 @@ enum RealmConfig { .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".domain_name", ActiveDirectorySessionFactoryTests.AD_DOMAIN) .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".group_search.base_dn", "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com") .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".group_search.scope", randomBoolean() ? SUB_TREE : ONE_LEVEL) - .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".url", ActiveDirectorySessionFactoryTests.AD_LDAP_URL) + .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".url", smbFixture.getAdLdapUrl()) .put(XPACK_SECURITY_AUTHC_REALMS_AD_EXTERNAL + ".follow_referrals", ActiveDirectorySessionFactoryTests.FOLLOW_REFERRALS) - .put(getFullSettingKey("external", ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING), AD_LDAP_PORT) - .put(getFullSettingKey("external", ActiveDirectorySessionFactorySettings.AD_LDAPS_PORT_SETTING), AD_LDAPS_PORT) - .put(getFullSettingKey("external", ActiveDirectorySessionFactorySettings.AD_GC_LDAP_PORT_SETTING), AD_GC_LDAP_PORT) - .put(getFullSettingKey("external", ActiveDirectorySessionFactorySettings.AD_GC_LDAPS_PORT_SETTING), AD_GC_LDAPS_PORT) .build(), "active_directory" ), @@ -373,7 +372,7 @@ enum RealmConfig { true, AD_ROLE_MAPPING, Settings.builder() - .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".url", ActiveDirectorySessionFactoryTests.AD_LDAP_URL) + .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".url", smbFixture.getAdLdapUrl()) .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".group_search.base_dn", "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com") .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".group_search.scope", randomBoolean() ? 
SUB_TREE : ONE_LEVEL) .putList( @@ -389,7 +388,7 @@ enum RealmConfig { true, AD_ROLE_MAPPING, Settings.builder() - .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".url", ActiveDirectorySessionFactoryTests.AD_LDAP_URL) + .put(XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".url", smbFixture.getAdLdapUrl()) .putList( XPACK_SECURITY_AUTHC_REALMS_LDAP_EXTERNAL + ".user_dn_templates", "cn={0},CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com" diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index d8f82c6419501..231bf47e3e712 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -6,15 +6,19 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.unboundid.ldap.sdk.Filter; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.fixtures.smb.SmbTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter; import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.support.LdapSearchScope; import org.elasticsearch.xpack.core.security.support.NoOpLogger; import org.junit.Before; +import org.junit.ClassRule; import java.util.List; import java.util.regex.Pattern; @@ -24,12 +28,16 @@ import static org.hamcrest.Matchers.hasItem; import static org.hamcrest.Matchers.is; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class ActiveDirectoryGroupsResolverTests extends GroupsResolverTestCase { private static final String BRUCE_BANNER_DN = "cn=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; private static final RealmConfig.RealmIdentifier REALM_ID = new RealmConfig.RealmIdentifier("active_directory", "ad"); + @ClassRule + public static final SmbTestContainer smbFixture = new SmbTestContainer(); + @Before public void setReferralFollowing() { ldapConnection.getConnectionOptions().setFollowReferrals(AbstractActiveDirectoryTestCase.FOLLOW_REFERRALS); @@ -145,7 +153,7 @@ private void assertValidSidQuery(Filter query, String[] expectedSids) { @Override protected String ldapUrl() { - return ActiveDirectorySessionFactoryTests.AD_LDAP_URL; + return smbFixture.getAdLdapUrl(); } @Override diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 120a27c944bd8..28637560d9d53 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -75,7 +75,11 @@ public boolean enableWarningsCheck() { } public void testAdAuth() throws Exception { - 
RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)); + RealmConfig config = configureRealm( + "ad-test", + LdapRealmSettings.AD_TYPE, + buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false) + ); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String userName = "ironman"; @@ -115,7 +119,7 @@ private RealmConfig configureRealm(String name, String type, Settings settings) } public void testNetbiosAuth() throws Exception { - final String adUrl = randomFrom(AD_LDAP_URL, AD_LDAP_GC_URL); + final String adUrl = randomFrom(smbFixture.getAdLdapUrl(), smbFixture.getAdLdapGcUrl()); RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, buildAdSettings(adUrl, AD_DOMAIN, false)); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { @@ -142,7 +146,11 @@ public void testNetbiosAuth() throws Exception { } public void testAdAuthAvengers() throws Exception { - RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false)); + RealmConfig config = configureRealm( + "ad-test", + LdapRealmSettings.AD_TYPE, + buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false) + ); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { String[] users = new String[] { "cap", "hawkeye", "hulk", "ironman", "thor", "blackwidow" }; @@ -158,7 +166,7 @@ public void testAdAuthAvengers() throws Exception { public void testAuthenticate() throws Exception { Settings settings = buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, @@ -191,7 +199,7 @@ public void testAuthenticate() throws Exception { public void testAuthenticateBaseUserSearch() throws Exception { Settings settings = buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Bruce Banner, CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.BASE, @@ -226,7 +234,7 @@ public void testAuthenticateBaseGroupSearch() throws Exception { .put( buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, @@ -255,7 +263,7 @@ public void testAuthenticateBaseGroupSearch() throws Exception { public void testAuthenticateWithUserPrincipalName() throws Exception { Settings settings = buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, @@ -281,7 +289,7 @@ public void testAuthenticateWithUserPrincipalName() throws Exception { public void testAuthenticateWithSAMAccountName() throws Exception { Settings settings = buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.ONE_LEVEL, @@ -310,7 +318,7 @@ public void testCustomUserFilter() throws Exception { .put( buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.SUB_TREE, @@ -349,7 +357,7 @@ public void testStandardLdapConnection() throws Exception { .put( LdapTestCase.buildLdapSettings( realmId, - new String[] { AD_LDAP_URL }, + new String[] { 
smbFixture.getAdLdapUrl() }, new String[] { userTemplate }, groupSearchBase, LdapSearchScope.SUB_TREE, @@ -389,7 +397,7 @@ public void testHandlingLdapReferralErrors() throws Exception { .put( LdapTestCase.buildLdapSettings( realmId, - new String[] { AD_LDAP_URL }, + new String[] { smbFixture.getAdLdapUrl() }, new String[] { userTemplate }, groupSearchBase, LdapSearchScope.SUB_TREE, @@ -423,7 +431,7 @@ public void testStandardLdapWithAttributeGroups() throws Exception { .put( LdapTestCase.buildLdapSettings( realmId, - new String[] { AD_LDAP_URL }, + new String[] { smbFixture.getAdLdapUrl() }, new String[] { userTemplate }, groupSearchBase, LdapSearchScope.SUB_TREE, @@ -456,7 +464,11 @@ public void testStandardLdapWithAttributeGroups() throws Exception { } public void testADLookup() throws Exception { - RealmConfig config = configureRealm("ad-test", LdapRealmSettings.AD_TYPE, buildAdSettings(AD_LDAP_URL, AD_DOMAIN, false, true)); + RealmConfig config = configureRealm( + "ad-test", + LdapRealmSettings.AD_TYPE, + buildAdSettings(smbFixture.getAdLdapUrl(), AD_DOMAIN, false, true) + ); try (ActiveDirectorySessionFactory sessionFactory = getActiveDirectorySessionFactory(config, sslService, threadPool)) { List<String> users = randomSubsetOf( @@ -499,7 +511,7 @@ public void testResolveTokenGroupsSID() throws Exception { .put( buildAdSettings( REALM_ID, - AD_LDAP_URL, + smbFixture.getAdLdapUrl(), AD_DOMAIN, "CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com", LdapSearchScope.SUB_TREE, @@ -536,10 +548,6 @@ private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean ho Settings.Builder builder = Settings.builder() .put(getFullSettingKey(REALM_ID, SessionFactorySettings.URLS_SETTING), ldapUrl) .put(getFullSettingKey(REALM_ID, ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING), adDomainName) - .put(getFullSettingKey(REALM_NAME, ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING), AD_LDAP_PORT) - .put(getFullSettingKey(REALM_NAME, ActiveDirectorySessionFactorySettings.AD_LDAPS_PORT_SETTING), AD_LDAPS_PORT) - .put(getFullSettingKey(REALM_NAME, ActiveDirectorySessionFactorySettings.AD_GC_LDAP_PORT_SETTING), AD_GC_LDAP_PORT) - .put(getFullSettingKey(REALM_NAME, ActiveDirectorySessionFactorySettings.AD_GC_LDAPS_PORT_SETTING), AD_GC_LDAPS_PORT) .put(getFullSettingKey(REALM_ID, SessionFactorySettings.FOLLOW_REFERRALS_SETTING), FOLLOW_REFERRALS); if (randomBoolean()) { builder.put( diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java index 5a8350739ef6b..256d710b3dfe2 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/UserAttributeGroupsResolverTests.java @@ -6,16 +6,20 @@ */ package org.elasticsearch.xpack.security.authc.ldap; +import com.carrotsearch.randomizedtesting.annotations.ThreadLeakFilters; import com.unboundid.ldap.sdk.Attribute; import com.unboundid.ldap.sdk.SearchRequest; import com.unboundid.ldap.sdk.SearchScope; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.TimeValue; +import org.elasticsearch.test.fixtures.smb.SmbTestContainer; +import org.elasticsearch.test.fixtures.testcontainers.TestContainersThreadFilter;
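// SmbTestContainer supplies the live Active Directory LDAP endpoint for this suite; as in the sibling AD suites, TestContainersThreadFilter keeps the container's background threads out of thread-leak detection.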
import org.elasticsearch.xpack.core.security.authc.RealmConfig; import org.elasticsearch.xpack.core.security.authc.ldap.UserAttributeGroupsResolverSettings; import org.elasticsearch.xpack.core.security.support.NoOpLogger; import org.elasticsearch.xpack.security.authc.ldap.support.LdapUtils; +import org.junit.ClassRule; import java.util.Collection; import java.util.List; @@ -26,11 +30,15 @@ import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.hasSize; +@ThreadLeakFilters(filters = { TestContainersThreadFilter.class }) public class UserAttributeGroupsResolverTests extends GroupsResolverTestCase { public static final String BRUCE_BANNER_DN = "cn=Bruce Banner,CN=Users,DC=ad,DC=test,DC=elasticsearch,DC=com"; private static final RealmConfig.RealmIdentifier REALM_ID = new RealmConfig.RealmIdentifier("ldap", "realm1"); + @ClassRule + public static final SmbTestContainer smbFixture = new SmbTestContainer(); + public void testResolve() throws Exception { // falling back on the 'memberOf' attribute UserAttributeGroupsResolver resolver = new UserAttributeGroupsResolver(config(REALM_ID, Settings.EMPTY)); @@ -112,7 +120,7 @@ public void testResolveInvalidGroupAttribute() throws Exception { @Override protected String ldapUrl() { - return ActiveDirectorySessionFactoryTests.AD_LDAP_URL; + return smbFixture.getAdLdapUrl(); } @Override diff --git a/x-pack/test/idp-fixture/README.txt b/x-pack/test/idp-fixture/README.txt index 8e42bb142e4ee..c05f53772ed65 100644 --- a/x-pack/test/idp-fixture/README.txt +++ b/x-pack/test/idp-fixture/README.txt @@ -1 +1 @@ -Provisions OpenLDAP + shibboleth IDP 3.4.2 using docker compose +Provisions OpenLDAP and Shibboleth IdP 3.4.2 as testcontainer fixtures diff --git a/x-pack/test/idp-fixture/build.gradle b/x-pack/test/idp-fixture/build.gradle index 0f5363a278f60..691483bcfe5c3 100644 --- a/x-pack/test/idp-fixture/build.gradle +++ b/x-pack/test/idp-fixture/build.gradle @@ -1,40 +1,9 @@ -import org.elasticsearch.gradle.VersionProperties -import org.elasticsearch.gradle.Architecture -import static org.elasticsearch.gradle.internal.distribution.InternalElasticsearchDistributionTypes.DOCKER; +apply plugin: 'elasticsearch.java' +apply plugin: 'elasticsearch.cache-test-fixtures' -apply plugin: 'elasticsearch.test.fixtures' +dependencies { + testImplementation project(':test:framework') -dockerCompose { - composeAdditionalArgs = ['--compatibility'] -} - -tasks.named("preProcessFixture").configure { - file("${testFixturesDir}/shared/oidc").mkdirs() -} - -tasks.register("copyFiles", Sync) { - from file("oidc/override.properties.template") - into "${buildDir}/config" - doLast { - file("${buildDir}/config").setReadable(true, false) - file("${buildDir}/config/override.properties.template").setReadable(true, false) - } -} - -tasks.named("postProcessFixture").configure { - dependsOn "copyFiles" - inputs.dir("${testFixturesDir}/shared/oidc") - File confTemplate = file("${buildDir}/config/override.properties.template") - File confFile = file("${testFixturesDir}/shared/oidc/override.properties") - outputs.file(confFile) - doLast { - assert confTemplate.exists() - String confContents = confTemplate.text - .replace("\${MAPPED_PORT}", "${ext."test.fixtures.oidc-provider.tcp.8080"}") - confFile.text = confContents - } -} - -tasks.named('composePull').configure { - enabled = false // this task fails due to docker-compose oddities + api project(':test:fixtures:testcontainer-utils') + api "junit:junit:${versions.junit}" } diff --git a/x-pack/test/idp-fixture/docker-compose.yml
b/x-pack/test/idp-fixture/docker-compose.yml deleted file mode 100644 index e431fa4ede611..0000000000000 --- a/x-pack/test/idp-fixture/docker-compose.yml +++ /dev/null @@ -1,67 +0,0 @@ -version: "3.7" -services: - openldap: - command: --copy-service --loglevel debug - image: "osixia/openldap:1.4.0" - ports: - - "389" - - "636" - environment: - LDAP_ADMIN_PASSWORD: "NickFuryHeartsES" - LDAP_DOMAIN: "oldap.test.elasticsearch.com" - LDAP_BASE_DN: "DC=oldap,DC=test,DC=elasticsearch,DC=com" - LDAP_TLS: "true" - LDAP_TLS_CRT_FILENAME: "ldap_server.pem" - LDAP_TLS_CA_CRT_FILENAME: "ca_server.pem" - LDAP_TLS_KEY_FILENAME: "ldap_server.key" - LDAP_TLS_VERIFY_CLIENT: "never" - LDAP_TLS_CIPHER_SUITE: "NORMAL" - LDAP_LOG_LEVEL: 256 - volumes: - - ./openldap/ldif/users.ldif:/container/service/slapd/assets/config/bootstrap/ldif/custom/20-bootstrap-users.ldif - - ./openldap/ldif/config.ldif:/container/service/slapd/assets/config/bootstrap/ldif/custom/10-bootstrap-config.ldif - - ./openldap/certs:/container/service/slapd/assets/certs - - shibboleth-idp: - build: - context: . - dockerfile: ./idp/Dockerfile - depends_on: - - openldap - environment: - - JETTY_MAX_HEAP=64m - ports: - - 4443 - expose: - - 4443 - links: - - openldap:openldap - restart: always #ensure ephemeral port mappings are properly updated - healthcheck: - test: curl -f -s --http0.9 http://localhost:4443 --connect-timeout 10 --max-time 10 --output - > /dev/null - interval: 5s - timeout: 20s - retries: 60 - start_period: 10s - - oidc-provider: - build: - context: . - dockerfile: ./oidc/Dockerfile - depends_on: - - http-proxy - ports: - - "8080" - expose: - - "8080" - volumes: - - ./testfixtures_shared/shared/oidc/:/config/c2id/ - - http-proxy: - image: "nginx:latest" - volumes: - - ./oidc/nginx.conf:/etc/nginx/nginx.conf - ports: - - "8888" - expose: - - "8888" diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/HttpProxyTestContainer.java b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/HttpProxyTestContainer.java new file mode 100644 index 0000000000000..4f7d3528f85d4 --- /dev/null +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/HttpProxyTestContainer.java @@ -0,0 +1,39 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.test.fixtures.idp; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.images.builder.ImageFromDockerfile; + +public final class HttpProxyTestContainer extends DockerEnvironmentAwareTestContainer { + + public static final String DOCKER_BASE_IMAGE = "nginx:latest"; + private static final Integer PORT = 8888; + + /** + * for packer caching only + * */ + public HttpProxyTestContainer() { + this(Network.newNetwork()); + } + + public HttpProxyTestContainer(Network network) { + super( + new ImageFromDockerfile("es-http-proxy-fixture").withDockerfileFromBuilder( + builder -> builder.from(DOCKER_BASE_IMAGE).copy("oidc/nginx.conf", "/etc/nginx/nginx.conf").build() + ).withFileFromClasspath("oidc/nginx.conf", "/oidc/nginx.conf") + ); + addExposedPort(PORT); + withNetwork(network); + } + + public Integer getProxyPort() { + return getMappedPort(PORT); + } +} diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java new file mode 100644 index 0000000000000..692cd4b081411 --- /dev/null +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/IdpTestContainer.java @@ -0,0 +1,170 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.test.fixtures.idp; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.junit.rules.TemporaryFolder; +import org.testcontainers.containers.Network; +import org.testcontainers.containers.wait.strategy.Wait; +import org.testcontainers.images.builder.ImageFromDockerfile; +import org.testcontainers.images.builder.dockerfile.statement.SingleArgumentStatement; + +import java.io.IOException; +import java.nio.file.Path; + +import static org.elasticsearch.test.fixtures.ResourceUtils.copyResourceToFile; + +public final class IdpTestContainer extends DockerEnvironmentAwareTestContainer { + + public static final String DOCKER_BASE_IMAGE = "openjdk:11.0.16-jre"; + + private final TemporaryFolder temporaryFolder = new TemporaryFolder(); + private Path certsPath; + + /** + * for packer caching only + * */ + protected IdpTestContainer() { + this(Network.newNetwork()); + } + + public IdpTestContainer(Network network) { + super( + new ImageFromDockerfile("es-idp-testfixture").withDockerfileFromBuilder( + builder -> builder.from(DOCKER_BASE_IMAGE) + .env("jetty_version", "9.3.27.v20190418") + .env("jetty_hash", "7c7c80dd1c9f921771e2b1a05deeeec652d5fcaa") + .env("idp_version", "3.4.3") + .env("idp_hash", "eb86bc7b6366ce2a44f97cae1b014d307b84257e3149469b22b2d091007309db") + .env("dta_hash", "2f547074b06952b94c35631398f36746820a7697") + .env("slf4j_version", "1.7.25") + .env("slf4j_hash", "da76ca59f6a57ee3102f8f9bd9cee742973efa8a") + .env("logback_version", "1.2.3") + .env("logback_classic_hash", "7c4f3c474fb2c041d8028740440937705ebb473a") + .env("logback_core_hash", "864344400c3d4d92dfeb0a305dc87d953677c03c") + .env("logback_access_hash", "e8a841cb796f6423c7afd8738df6e0e4052bf24a") + + .env("JETTY_HOME", "/opt/jetty-home") + .env("JETTY_BASE", "/opt/shib-jetty-base") + .env("PATH", 
"$PATH:$JAVA_HOME/bin") + .env("JETTY_BROWSER_SSL_KEYSTORE_PASSWORD", "secret") + .env("JETTY_BACKCHANNEL_SSL_KEYSTORE_PASSWORD", "secret") + .env("JETTY_MAX_HEAP", "64m") + // Manually override the jetty keystore otherwise it will attempt to download and fail + .run("mkdir -p /opt/shib-jetty-base/modules") + .copy("idp/jetty-custom/ssl.mod", "/opt/shib-jetty-base/modules/ssl.mod") + .copy("idp/jetty-custom/keystore", "/opt/shib-jetty-base/etc/keystore") + // Download Jetty, verify the hash, and install, initialize a new base + .run( + "wget -q https://repo.maven.apache.org/maven2/org/eclipse/jetty/jetty-distribution/$jetty_version/jetty-distribution-$jetty_version.tar.gz" + + " && echo \"$jetty_hash jetty-distribution-$jetty_version.tar.gz\" | sha1sum -c -" + + " && tar -zxvf jetty-distribution-$jetty_version.tar.gz -C /opt" + + " && ln -s /opt/jetty-distribution-$jetty_version/ /opt/jetty-home" + ) + // Config Jetty + .run( + "mkdir -p /opt/shib-jetty-base/modules /opt/shib-jetty-base/lib/ext /opt/shib-jetty-base/lib/logging /opt/shib-jetty-base/resources" + + " && cd /opt/shib-jetty-base" + + " && touch start.ini" + + " && java -jar ../jetty-home/start.jar --add-to-startd=http,https,deploy,ext,annotations,jstl,rewrite" + ) + // Download Shibboleth IdP, verify the hash, and install + .run( + "wget -q https://shibboleth.net/downloads/identity-provider/archive/$idp_version/shibboleth-identity-provider-$idp_version.tar.gz" + + " && echo \"$idp_hash shibboleth-identity-provider-$idp_version.tar.gz\" | sha256sum -c -" + + " && tar -zxvf shibboleth-identity-provider-$idp_version.tar.gz -C /opt" + + " && ln -s /opt/shibboleth-identity-provider-$idp_version/ /opt/shibboleth-idp" + ) + // Download the library to allow SOAP Endpoints, verify the hash, and place + .run( + "wget -q https://build.shibboleth.net/nexus/content/repositories/releases/net/shibboleth/utilities/jetty9/jetty9-dta-ssl/1.0.0/jetty9-dta-ssl-1.0.0.jar" + + " && echo \"$dta_hash jetty9-dta-ssl-1.0.0.jar\" | sha1sum -c -" + + " && mv jetty9-dta-ssl-1.0.0.jar /opt/shib-jetty-base/lib/ext/" + ) + // Download the slf4j library for Jetty logging, verify the hash, and place + .run( + "wget -q https://repo.maven.apache.org/maven2/org/slf4j/slf4j-api/$slf4j_version/slf4j-api-$slf4j_version.jar" + + " && echo \"$slf4j_hash slf4j-api-$slf4j_version.jar\" | sha1sum -c -" + + " && mv slf4j-api-$slf4j_version.jar /opt/shib-jetty-base/lib/logging/" + ) + // Download the logback_classic library for Jetty logging, verify the hash, and place + .run( + "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-classic/$logback_version/logback-classic-$logback_version.jar" + + " && echo \"$logback_classic_hash logback-classic-$logback_version.jar\" | sha1sum -c -" + + " && mv logback-classic-$logback_version.jar /opt/shib-jetty-base/lib/logging/" + ) + // Download the logback-core library for Jetty logging, verify the hash, and place + .run( + "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-core/$logback_version/logback-core-$logback_version.jar" + + " && echo \"$logback_core_hash logback-core-$logback_version.jar\" | sha1sum -c -" + + " && mv logback-core-$logback_version.jar /opt/shib-jetty-base/lib/logging/" + ) + // Download the logback-access library for Jetty logging, verify the hash, and place + .run( + "wget -q https://repo.maven.apache.org/maven2/ch/qos/logback/logback-access/$logback_version/logback-access-$logback_version.jar" + + " && echo \"$logback_access_hash logback-access-$logback_version.jar\" | 
sha1sum -c -" + " && mv logback-access-$logback_version.jar /opt/shib-jetty-base/lib/logging/" ) + // Copy local files + .copy("idp/shib-jetty-base/", "/opt/shib-jetty-base/") + .copy("idp/shibboleth-idp/", "/opt/shibboleth-idp/") + .copy("idp/bin/", "/usr/local/bin/") + // Set ownership and permissions + .run( "useradd jetty -U -s /bin/false" + " && chown -R root:jetty /opt" + " && chmod -R 640 /opt" + " && chown -R root:jetty /opt/shib-jetty-base" + " && chmod -R 640 /opt/shib-jetty-base" + " && chmod -R 750 /opt/shibboleth-idp/bin" ) + .run("chmod 750 /usr/local/bin/run-jetty.sh /usr/local/bin/init-idp.sh") + .run("chmod +x /opt/jetty-home/bin/jetty.sh") + // Opening 4443 (browser TLS), 8443 (mutual auth TLS) + .cmd("run-jetty.sh") + .withStatement( + new SingleArgumentStatement( + "HEALTHCHECK", + "CMD curl -f -s --http0.9 http://localhost:4443 " + "--connect-timeout 10 --max-time 10 --output - > /dev/null" + ) + ) + // .expose(4443) + .build() + ) + .withFileFromClasspath("idp/jetty-custom/ssl.mod", "/idp/jetty-custom/ssl.mod") + .withFileFromClasspath("idp/jetty-custom/keystore", "/idp/jetty-custom/keystore") + .withFileFromClasspath("idp/shib-jetty-base/", "/idp/shib-jetty-base/") + .withFileFromClasspath("idp/shibboleth-idp/", "/idp/shibboleth-idp/") + .withFileFromClasspath("idp/bin/", "/idp/bin/") + ); + withNetworkAliases("idp"); + withNetwork(network); + waitingFor(Wait.forHealthcheck()); + addExposedPorts(4443, 8443); + } + + @Override + public void stop() { + super.stop(); + temporaryFolder.delete(); + } + + public Path getBrowserPem() { + try { + temporaryFolder.create(); + certsPath = temporaryFolder.newFolder("certs").toPath(); + return copyResourceToFile(getClass(), certsPath, "idp/shibboleth-idp/credentials/idp-browser.pem"); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Integer getDefaultPort() { + return getMappedPort(4443); + } +} diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OidcProviderTestContainer.java b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OidcProviderTestContainer.java new file mode 100644 index 0000000000000..89090fa6e11bc --- /dev/null +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OidcProviderTestContainer.java @@ -0,0 +1,66 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.test.fixtures.idp; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.testcontainers.containers.Network; +import org.testcontainers.images.builder.ImageFromDockerfile; +import org.testcontainers.images.builder.Transferable; + +public final class OidcProviderTestContainer extends DockerEnvironmentAwareTestContainer { + + private static final int PORT = 8080; + + /** + * for packer caching only + * */ + protected OidcProviderTestContainer() { + this(Network.newNetwork()); + } + + public OidcProviderTestContainer(Network network) { + super( + new ImageFromDockerfile("es-oidc-provider-fixture").withFileFromClasspath("oidc/setup.sh", "/oidc/setup.sh") + // we cannot make use of docker file builder + // as it does not support multi-stage builds + .withFileFromClasspath("Dockerfile", "oidc/Dockerfile") + ); + withNetworkAliases("oidc-provider"); + withNetwork(network); + addExposedPort(PORT); + } + + @Override + public void start() { + super.start(); + copyFileToContainer( + Transferable.of( + "op.issuer=http://127.0.0.1:" + + getMappedPort(PORT) + + "/c2id\n" + + "op.authz.endpoint=http://127.0.0.1:" + + getMappedPort(PORT) + + "/c2id-login/\n" + + "op.reg.apiAccessTokenSHA256=d1c4fa70d9ee708d13cfa01daa0e060a05a2075a53c5cc1ad79e460e96ab5363\n" + + "jose.jwkSer=RnVsbCBrZXk6CnsKICAia2V5cyI6IFsKICAgIHsKICAgICAgInAiOiAiLXhhN2d2aW5tY3N3QXU3Vm1mV2loZ2o3U3gzUzhmd2dFSTdMZEVveW5FU1RzcElaeUY5aHc0NVhQZmI5VHlpbzZsOHZTS0F5RmU4T2lOalpkNE1Ra0ttYlJzTmxxR1Y5VlBoWF84UG1JSm5mcGVhb3E5YnZfU0k1blZHUl9zYUUzZE9sTEE2VWpaS0lsRVBNb0ZuRlZCMUFaUU9qQlhRRzZPTDg2eDZ2NHMwIiwKICAgICAgImt0eSI6ICJSU0EiLAogICAgICAicSI6ICJ2Q3pDQUlpdHV0MGx1V0djQloyLUFabURLc1RxNkkxcUp0RmlEYkIyZFBNQVlBNldOWTdaWEZoVWxsSjJrT2ZELWdlYjlkYkN2ODBxNEwyajVZSjZoOTBUc1NRWWVHRlljN1lZMGdCMU5VR3l5cXctb29QN0EtYlJmMGI3b3I4ajZJb0hzQTZKa2JranN6c3otbkJ2U2RmUURlZkRNSVc3Ni1ZWjN0c2hsY2MiLAogICAgICAiZCI6ICJtbFBOcm1zVVM5UmJtX1I5SElyeHdmeFYzZnJ2QzlaQktFZzRzc1ZZaThfY09lSjV2U1hyQV9laEtwa2g4QVhYaUdWUGpQbVlyd29xQzFVUksxUkZmLVg0dG10emV2OUVHaU12Z0JCaEF5RkdTSUd0VUNla2x4Q2dhb3BpMXdZSU1Bd0M0STZwMUtaZURxTVNCWVZGeHA5ZWlJZ2pwb05JbV9lR3hXUUs5VHNnYmk5T3lyc1VqaE9KLVczN2JVMEJWUU56UXpxODhCcGxmNzM3VmV1dy1FeDZaMk1iWXR3SWdfZ0JVb0JEZ0NrZkhoOVE4MElYcEZRV0x1RzgwenFrdkVwTHZ0RWxLbDRvQ3BHVnBjcmFUOFNsOGpYc3FDT1k0dnVRT19LRVUzS2VPNUNJbHd4eEhJYXZjQTE5cHFpSWJ5cm1LbThxS0ZEWHluUFJMSGFNZ1EiLAogICAgICAiZSI6ICJBUUFCIiwKICAgICAgImtpZCI6ICJyc2EzODRfMjA0OCIsCiAgICAgICJxaSI6ICJzMldTamVrVDl3S2JPbk9neGNoaDJPY3VubzE2Y20wS281Z3hoUWJTdVMyMldfUjJBR2ZVdkRieGF0cTRLakQ3THo3X1k2TjdTUkwzUVpudVhoZ1djeXgyNGhrUGppQUZLNmlkYVZKQzJqQmgycEZTUDVTNXZxZ0lsME12eWY4NjlwdkN4S0NzaGRKMGdlRWhveE93VkRPYXJqdTl2Zm9IQV90LWJoRlZrUnciLAogICAgICAiZHAiOiAiQlJhQTFqYVRydG9mTHZBSUJBYW1OSEVhSm51RU9zTVJJMFRCZXFuR1BNUm0tY2RjSG1OUVo5WUtqb2JpdXlmbnhGZ0piVDlSeElBRG0ySkpoZEp5RTN4Y1dTSzhmSjBSM1Jick1aT1dwako0QmJTVzFtU1VtRnlKTGxib3puRFhZR2RaZ1hzS0o1UkFrRUNQZFBCY3YwZVlkbk9NYWhfZndfaFZoNjRuZ2tFIiwKICAgICAgImFsZyI6ICJSU0EzODQiLAogICAgICAiZHEiOiAiUFJoVERKVlR3cDNXaDZfWFZrTjIwMUlpTWhxcElrUDN1UTYyUlRlTDNrQ2ZXSkNqMkZPLTRxcVRIQk0tQjZJWUVPLXpoVWZyQnhiMzJ1djNjS2JDWGFZN3BJSFJxQlFEQWQ2WGhHYzlwc0xqNThXd3VGY2RncERJYUFpRjNyc3NUMjJ4UFVvYkJFTVdBalV3bFJrNEtNTjItMnpLQk5FR3lIcDIzOUpKdnpVIiwKICAgICAgIm4iOiAidUpDWDVDbEZpM0JnTXBvOWhRSVZ2SDh0Vi1jLTVFdG5OeUZxVm91R3NlNWwyUG92MWJGb0tsRllsU25YTzNWUE9KRWR3azNDdl9VT0UtQzlqZERYRHpvS3Z4RURaTVM1TDZWMFpIVEJoNndIOV9iN3JHSlBxLV9RdlNkejczSzZxbHpGaUtQamRvdTF6VlFYTmZfblBZbnRnQkdNRUtBc1pRNGp0cWJCdE5lV0h0MF9UM001cEktTV9KNGVlRWpCTW95TkZuU2ExTEZDVmZRNl9YVnpjelp1TlRGMlh
6UmdRWkFmcmJGRXZ6eXR1TzVMZTNTTXFrUUFJeDhFQmkwYXVlRUNqNEQ4cDNVNXFVRG92NEF2VnRJbUZlbFJvb1pBMHJtVW1KRHJ4WExrVkhuVUpzaUF6ZW9TLTNBSnV1bHJkMGpuNjJ5VjZHV2dFWklZMVNlZVd3IgogICAgfQogIF0KfQo\n" + + "op.authz.alwaysPromptForConsent=true\n" + + "op.authz.alwaysPromptForAuth=true" + ), + "config/c2id/override.properties" + ); + } + + public String getC2OPUrl() { + return "http://127.0.0.1:" + getMappedPort(PORT); + } + + public String getC2IssuerUrl() { + return getC2OPUrl() + "/c2id"; + } + +} diff --git a/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java new file mode 100644 index 0000000000000..2f65134f2ec72 --- /dev/null +++ b/x-pack/test/idp-fixture/src/main/java/org/elasticsearch/test/fixtures/idp/OpenLdapTestContainer.java @@ -0,0 +1,109 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. + */ + +package org.elasticsearch.test.fixtures.idp; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.junit.rules.TemporaryFolder; +import org.testcontainers.containers.Network; +import org.testcontainers.images.builder.ImageFromDockerfile; + +import java.io.IOException; +import java.nio.file.Path; + +import static org.elasticsearch.test.fixtures.ResourceUtils.copyResourceToFile; + +public final class OpenLdapTestContainer extends DockerEnvironmentAwareTestContainer { + + public static final String DOCKER_BASE_IMAGE = "osixia/openldap:1.4.0"; + + private final TemporaryFolder temporaryFolder = new TemporaryFolder(); + private Path certsPath; + + public OpenLdapTestContainer() { + this(Network.newNetwork()); + } + + public OpenLdapTestContainer(Network network) { + super( + new ImageFromDockerfile("es-openldap-testfixture").withDockerfileFromBuilder( + builder -> builder.from(DOCKER_BASE_IMAGE) + .env("LDAP_ADMIN_PASSWORD", "NickFuryHeartsES") + .env("LDAP_DOMAIN", "oldap.test.elasticsearch.com") + .env("LDAP_BASE_DN", "DC=oldap,DC=test,DC=elasticsearch,DC=com") + .env("LDAP_TLS", "true") + .env("LDAP_TLS_CRT_FILENAME", "ldap_server.pem") + .env("LDAP_TLS_CA_CRT_FILENAME", "ca_server.pem") + .env("LDAP_TLS_KEY_FILENAME", "ldap_server.key") + .env("LDAP_TLS_VERIFY_CLIENT", "never") + .env("LDAP_TLS_CIPHER_SUITE", "NORMAL") + .env("LDAP_LOG_LEVEL", "256") + .copy( + "openldap/ldif/users.ldif", + "/container/service/slapd/assets/config/bootstrap/ldif/custom/20-bootstrap-users.ldif" + ) + .copy( + "openldap/ldif/config.ldif", + "/container/service/slapd/assets/config/bootstrap/ldif/custom/10-bootstrap-config.ldif" + ) + .copy("openldap/certs", "/container/service/slapd/assets/certs") + + .build() + ) + .withFileFromClasspath("openldap/certs", "/openldap/certs/") + .withFileFromClasspath("openldap/ldif/users.ldif", "/openldap/ldif/users.ldif") + .withFileFromClasspath("openldap/ldif/config.ldif", "/openldap/ldif/config.ldif") + ); + // withLogConsumer(new Slf4jLogConsumer(logger())); + withNetworkAliases("openldap"); + withNetwork(network); + withExposedPorts(389, 636); + } + + public String getLdapUrl() { + return "ldaps://localhost:" + getMappedPort(636); + } + + @Override + public void start() { + super.start(); + setupCerts(); + } + + @Override + public void stop() { + super.stop(); + 
temporaryFolder.delete(); + } + + private void setupCerts() { + try { + temporaryFolder.create(); + certsPath = temporaryFolder.newFolder("certs").toPath(); + copyResourceToFile(getClass(), certsPath, "openldap/certs/ca.jks"); + copyResourceToFile(getClass(), certsPath, "openldap/certs/ca_server.key"); + copyResourceToFile(getClass(), certsPath, "openldap/certs/ca_server.pem"); + copyResourceToFile(getClass(), certsPath, "openldap/certs/dhparam.pem"); + copyResourceToFile(getClass(), certsPath, "openldap/certs/ldap_server.key"); + copyResourceToFile(getClass(), certsPath, "openldap/certs/ldap_server.pem"); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public Path getJavaKeyStorePath() { + return certsPath.resolve("ca.jks"); + } + + public Path getCaCertPath() { + return certsPath.resolve("ca_server.pem"); + } + + public Integer getDefaultPort() { + return getMappedPort(636); + } +} diff --git a/x-pack/test/idp-fixture/idp/Dockerfile b/x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile similarity index 100% rename from x-pack/test/idp-fixture/idp/Dockerfile rename to x-pack/test/idp-fixture/src/main/resources/idp/Dockerfile diff --git a/x-pack/test/idp-fixture/idp/bin/init-idp.sh b/x-pack/test/idp-fixture/src/main/resources/idp/bin/init-idp.sh similarity index 100% rename from x-pack/test/idp-fixture/idp/bin/init-idp.sh rename to x-pack/test/idp-fixture/src/main/resources/idp/bin/init-idp.sh diff --git a/x-pack/test/idp-fixture/idp/bin/run-jetty.sh b/x-pack/test/idp-fixture/src/main/resources/idp/bin/run-jetty.sh similarity index 97% rename from x-pack/test/idp-fixture/idp/bin/run-jetty.sh rename to x-pack/test/idp-fixture/src/main/resources/idp/bin/run-jetty.sh index 24ece94c2715d..0160cc613407d 100644 --- a/x-pack/test/idp-fixture/idp/bin/run-jetty.sh +++ b/x-pack/test/idp-fixture/src/main/resources/idp/bin/run-jetty.sh @@ -20,7 +20,7 @@ exit_code=$? end_time=$(date +%s) duration=$((end_time - start_time)) -if [ $duration -lt 5 ]; then +if [ $duration -lt 10 ]; then /opt/jetty-home/bin/jetty.sh run exit_code=$? 
fi diff --git a/x-pack/test/idp-fixture/idp/jetty-custom/keystore b/x-pack/test/idp-fixture/src/main/resources/idp/jetty-custom/keystore similarity index 100% rename from x-pack/test/idp-fixture/idp/jetty-custom/keystore rename to x-pack/test/idp-fixture/src/main/resources/idp/jetty-custom/keystore diff --git a/x-pack/test/idp-fixture/idp/jetty-custom/ssl.mod b/x-pack/test/idp-fixture/src/main/resources/idp/jetty-custom/ssl.mod similarity index 100% rename from x-pack/test/idp-fixture/idp/jetty-custom/ssl.mod rename to x-pack/test/idp-fixture/src/main/resources/idp/jetty-custom/ssl.mod diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-backchannel.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-backchannel.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-backchannel.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-backchannel.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-logging.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-logging.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-logging.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-logging.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-requestlog.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-requestlog.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-requestlog.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-requestlog.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-rewrite.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-rewrite.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-rewrite.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-rewrite.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-ssl-context.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-ssl-context.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/etc/jetty-ssl-context.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/etc/jetty-ssl-context.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/modules/backchannel.mod b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/modules/backchannel.mod similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/modules/backchannel.mod rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/modules/backchannel.mod diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/resources/logback-access.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/resources/logback-access.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/resources/logback-access.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/resources/logback-access.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/resources/logback.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/resources/logback.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/resources/logback.xml rename to 
x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/resources/logback.xml diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/start.d/backchannel.ini b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.d/backchannel.ini similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/start.d/backchannel.ini rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.d/backchannel.ini diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/start.d/ssl.ini b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.d/ssl.ini similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/start.d/ssl.ini rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.d/ssl.ini diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/start.ini b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.ini similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/start.ini rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/start.ini diff --git a/x-pack/test/idp-fixture/idp/shib-jetty-base/webapps/idp.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/webapps/idp.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shib-jetty-base/webapps/idp.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shib-jetty-base/webapps/idp.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/access-control.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/access-control.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/access-control.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/access-control.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/admin/general-admin.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/admin/general-admin.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/admin/general-admin.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/admin/general-admin.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/admin/metrics.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/admin/metrics.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/admin/metrics.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/admin/metrics.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/attribute-filter.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/attribute-filter.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/attribute-filter.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/attribute-filter.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/attribute-resolver.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/attribute-resolver.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/attribute-resolver.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/attribute-resolver.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/audit.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/audit.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/audit.xml rename 
to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/audit.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/authn-comparison.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/authn-comparison.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/authn-comparison.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/authn-comparison.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/authn-events-flow.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/authn-events-flow.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/authn-events-flow.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/authn-events-flow.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/duo-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/duo-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/duo-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/duo-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/duo.properties b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/duo.properties similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/duo.properties rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/duo.properties diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/external-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/external-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/external-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/external-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/function-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/function-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/function-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/function-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/general-authn.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/general-authn.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/general-authn.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/general-authn.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/ipaddress-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/ipaddress-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/ipaddress-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/ipaddress-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/jaas-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/jaas-authn-config.xml similarity index 100% rename from 
x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/jaas-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/jaas-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/jaas.config b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/jaas.config similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/jaas.config rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/jaas.config diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/krb5-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/krb5-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/krb5-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/krb5-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/ldap-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/ldap-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/ldap-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/ldap-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/mfa-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/mfa-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/mfa-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/mfa-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/password-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/password-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/password-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/password-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/remoteuser-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/remoteuser-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/remoteuser-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/remoteuser-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/remoteuser-internal-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/remoteuser-internal-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/remoteuser-internal-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/remoteuser-internal-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/spnego-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/spnego-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/spnego-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/spnego-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/x509-authn-config.xml 
b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/x509-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/x509-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/x509-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/x509-internal-authn-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/x509-internal-authn-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/authn/x509-internal-authn-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/authn/x509-internal-authn-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/attribute-sourced-subject-c14n-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/attribute-sourced-subject-c14n-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/attribute-sourced-subject-c14n-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/attribute-sourced-subject-c14n-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/simple-subject-c14n-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/simple-subject-c14n-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/simple-subject-c14n-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/simple-subject-c14n-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/subject-c14n-events-flow.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/subject-c14n-events-flow.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/subject-c14n-events-flow.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/subject-c14n-events-flow.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/subject-c14n.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/subject-c14n.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/subject-c14n.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/subject-c14n.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/x500-subject-c14n-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/x500-subject-c14n-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/c14n/x500-subject-c14n-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/c14n/x500-subject-c14n-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/cas-protocol.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/cas-protocol.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/cas-protocol.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/cas-protocol.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/credentials.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/credentials.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/credentials.xml rename to 
x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/credentials.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/errors.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/errors.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/errors.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/errors.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/global.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/global.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/global.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/global.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/idp.properties b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/idp.properties similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/idp.properties rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/idp.properties diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/consent-intercept-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/consent-intercept-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/consent-intercept-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/consent-intercept-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/context-check-intercept-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/context-check-intercept-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/context-check-intercept-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/context-check-intercept-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/expiring-password-intercept-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/expiring-password-intercept-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/expiring-password-intercept-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/expiring-password-intercept-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/impersonate-intercept-config.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/impersonate-intercept-config.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/impersonate-intercept-config.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/impersonate-intercept-config.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/intercept-events-flow.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/intercept-events-flow.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/intercept-events-flow.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/intercept-events-flow.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/profile-intercept.xml 
b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/profile-intercept.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/intercept/profile-intercept.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/intercept/profile-intercept.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/ldap.properties b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/ldap.properties similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/ldap.properties rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/ldap.properties diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/logback.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/logback.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/logback.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/logback.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/metadata-providers.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/metadata-providers.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/metadata-providers.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/metadata-providers.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/relying-party.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/relying-party.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/relying-party.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/relying-party.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/saml-nameid.properties b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/saml-nameid.properties similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/saml-nameid.properties rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/saml-nameid.properties diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/saml-nameid.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/saml-nameid.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/saml-nameid.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/saml-nameid.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/services.properties b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/services.properties similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/services.properties rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/services.properties diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/services.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/services.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/services.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/services.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/conf/session-manager.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/session-manager.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/conf/session-manager.xml rename to 
x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/conf/session-manager.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/README b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/README similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/README rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/README diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/ca_server.pem b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/ca_server.pem similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/ca_server.pem rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/ca_server.pem diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-backchannel.crt b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-backchannel.crt similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-backchannel.crt rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-backchannel.crt diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-backchannel.p12 b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-backchannel.p12 similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-backchannel.p12 rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-backchannel.p12 diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.key b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.key similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.key rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.key diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.p12 b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.p12 similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.p12 rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.p12 diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.pem b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.pem similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-browser.pem rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-browser.pem diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-encryption.crt b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-encryption.crt similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-encryption.crt rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-encryption.crt diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-encryption.key b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-encryption.key similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-encryption.key rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-encryption.key diff --git 
a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-signing.crt b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-signing.crt similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-signing.crt rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-signing.crt diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-signing.key b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-signing.key similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/idp-signing.key rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/idp-signing.key diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sealer.jks b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sealer.jks similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sealer.jks rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sealer.jks diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sealer.kver b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sealer.kver similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sealer.kver rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sealer.kver diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sp-signing.crt b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sp-signing.crt similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sp-signing.crt rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sp-signing.crt diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sp-signing.key b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sp-signing.key similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/credentials/sp-signing.key rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/credentials/sp-signing.key diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/README.asciidoc b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/README.asciidoc similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/README.asciidoc rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/README.asciidoc diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/idp-docs-metadata.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/idp-docs-metadata.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/idp-docs-metadata.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/idp-docs-metadata.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/idp-metadata.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/idp-metadata.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/idp-metadata.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/idp-metadata.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata.xml similarity index 
100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata2.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata2.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata2.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata2.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata3.xml b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata3.xml similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/metadata/sp-metadata3.xml rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/metadata/sp-metadata3.xml diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/admin/unlock-keys.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/admin/unlock-keys.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/admin/unlock-keys.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/admin/unlock-keys.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/client-storage/client-storage-read.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/client-storage/client-storage-read.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/client-storage/client-storage-read.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/client-storage/client-storage-read.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/client-storage/client-storage-write.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/client-storage/client-storage-write.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/client-storage/client-storage-write.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/client-storage/client-storage-write.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/duo.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/duo.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/duo.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/duo.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/error.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/error.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/error.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/error.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/attribute-release.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/attribute-release.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/attribute-release.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/attribute-release.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/expiring-password.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/expiring-password.vm similarity index 100% rename from 
x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/expiring-password.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/expiring-password.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/impersonate.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/impersonate.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/impersonate.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/impersonate.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/terms-of-use.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/terms-of-use.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/intercept/terms-of-use.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/intercept/terms-of-use.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/login-error.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/login-error.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/login-error.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/login-error.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/login.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/login.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/login.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/login.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout-complete.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout-complete.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout-complete.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout-complete.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout-propagate.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout-propagate.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout-propagate.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout-propagate.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/logout.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/logout.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/spnego-unavailable.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/spnego-unavailable.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/spnego-unavailable.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/spnego-unavailable.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/views/user-prefs.vm b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/user-prefs.vm similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/views/user-prefs.vm rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/views/user-prefs.vm diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/consent.css 
b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/consent.css similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/consent.css rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/consent.css diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/logout.css b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/logout.css similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/logout.css rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/logout.css diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/main.css b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/main.css similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/css/main.css rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/css/main.css diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/dummylogo-mobile.png b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/dummylogo-mobile.png similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/dummylogo-mobile.png rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/dummylogo-mobile.png diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/dummylogo.png b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/dummylogo.png similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/dummylogo.png rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/dummylogo.png diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/failure-32x32.png b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/failure-32x32.png similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/failure-32x32.png rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/failure-32x32.png diff --git a/x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/success-32x32.png b/x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/success-32x32.png similarity index 100% rename from x-pack/test/idp-fixture/idp/shibboleth-idp/webapp/images/success-32x32.png rename to x-pack/test/idp-fixture/src/main/resources/idp/shibboleth-idp/webapp/images/success-32x32.png diff --git a/x-pack/test/idp-fixture/oidc/Dockerfile b/x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile similarity index 100% rename from x-pack/test/idp-fixture/oidc/Dockerfile rename to x-pack/test/idp-fixture/src/main/resources/oidc/Dockerfile diff --git a/x-pack/test/idp-fixture/oidc/nginx.conf b/x-pack/test/idp-fixture/src/main/resources/oidc/nginx.conf similarity index 100% rename from x-pack/test/idp-fixture/oidc/nginx.conf rename to x-pack/test/idp-fixture/src/main/resources/oidc/nginx.conf diff --git a/x-pack/test/idp-fixture/oidc/op-jwks.json b/x-pack/test/idp-fixture/src/main/resources/oidc/op-jwks.json similarity index 100% rename from x-pack/test/idp-fixture/oidc/op-jwks.json rename to x-pack/test/idp-fixture/src/main/resources/oidc/op-jwks.json diff --git a/x-pack/test/idp-fixture/oidc/override.properties.template b/x-pack/test/idp-fixture/src/main/resources/oidc/override.properties.template similarity index 100% rename from 
x-pack/test/idp-fixture/oidc/override.properties.template rename to x-pack/test/idp-fixture/src/main/resources/oidc/override.properties.template diff --git a/x-pack/test/idp-fixture/oidc/setup.sh b/x-pack/test/idp-fixture/src/main/resources/oidc/setup.sh similarity index 100% rename from x-pack/test/idp-fixture/oidc/setup.sh rename to x-pack/test/idp-fixture/src/main/resources/oidc/setup.sh diff --git a/x-pack/test/idp-fixture/openldap/certs/README b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/README similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/README rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/README diff --git a/x-pack/test/idp-fixture/openldap/certs/ca.jks b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca.jks similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ca.jks rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca.jks diff --git a/x-pack/test/idp-fixture/openldap/certs/ca_server.key b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca_server.key similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ca_server.key rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca_server.key diff --git a/x-pack/test/idp-fixture/openldap/certs/ca_server.pem b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca_server.pem similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ca_server.pem rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ca_server.pem diff --git a/x-pack/test/idp-fixture/openldap/certs/dhparam.pem b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/dhparam.pem similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/dhparam.pem rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/dhparam.pem diff --git a/x-pack/test/idp-fixture/openldap/certs/ldap_server.csr b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.csr similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ldap_server.csr rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.csr diff --git a/x-pack/test/idp-fixture/openldap/certs/ldap_server.key b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.key similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ldap_server.key rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.key diff --git a/x-pack/test/idp-fixture/openldap/certs/ldap_server.pem b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.pem similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/ldap_server.pem rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/ldap_server.pem diff --git a/x-pack/test/idp-fixture/openldap/certs/templates/ca_server.conf b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/templates/ca_server.conf similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/templates/ca_server.conf rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/templates/ca_server.conf diff --git a/x-pack/test/idp-fixture/openldap/certs/templates/ldap_server.conf b/x-pack/test/idp-fixture/src/main/resources/openldap/certs/templates/ldap_server.conf similarity index 100% rename from x-pack/test/idp-fixture/openldap/certs/templates/ldap_server.conf rename to x-pack/test/idp-fixture/src/main/resources/openldap/certs/templates/ldap_server.conf diff --git 
a/x-pack/test/idp-fixture/openldap/ldif/config.ldif b/x-pack/test/idp-fixture/src/main/resources/openldap/ldif/config.ldif similarity index 100% rename from x-pack/test/idp-fixture/openldap/ldif/config.ldif rename to x-pack/test/idp-fixture/src/main/resources/openldap/ldif/config.ldif diff --git a/x-pack/test/idp-fixture/openldap/ldif/users.ldif b/x-pack/test/idp-fixture/src/main/resources/openldap/ldif/users.ldif similarity index 100% rename from x-pack/test/idp-fixture/openldap/ldif/users.ldif rename to x-pack/test/idp-fixture/src/main/resources/openldap/ldif/users.ldif diff --git a/x-pack/test/smb-fixture/Dockerfile b/x-pack/test/smb-fixture/Dockerfile deleted file mode 100644 index bcd74758ff496..0000000000000 --- a/x-pack/test/smb-fixture/Dockerfile +++ /dev/null @@ -1,12 +0,0 @@ -FROM ubuntu:16.04 -RUN apt-get update -qqy && apt-get install -qqy samba ldap-utils -ADD . /fixture -RUN chmod +x /fixture/src/main/resources/provision/installsmb.sh -RUN /fixture/src/main/resources/provision/installsmb.sh - -EXPOSE 389 -EXPOSE 636 -EXPOSE 3268 -EXPOSE 3269 - -CMD service samba-ad-dc restart && sleep infinity diff --git a/x-pack/test/smb-fixture/build.gradle b/x-pack/test/smb-fixture/build.gradle index 8740d94f26357..aeb5626ce9508 100644 --- a/x-pack/test/smb-fixture/build.gradle +++ b/x-pack/test/smb-fixture/build.gradle @@ -1 +1,13 @@ -apply plugin: 'elasticsearch.test.fixtures' +apply plugin: 'elasticsearch.java' +apply plugin: 'elasticsearch.cache-test-fixtures' + +dependencies { + api project(':test:fixtures:testcontainer-utils') + api "junit:junit:${versions.junit}" + api "org.testcontainers:testcontainers:${versions.testcontainer}" + api "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" + + // ensure we have proper logging when used in tests + runtimeOnly "org.slf4j:slf4j-simple:${versions.slf4j}" + runtimeOnly "org.hamcrest:hamcrest:${versions.hamcrest}" +} diff --git a/x-pack/test/smb-fixture/docker-compose.yml b/x-pack/test/smb-fixture/docker-compose.yml deleted file mode 100644 index 51a76fd42b435..0000000000000 --- a/x-pack/test/smb-fixture/docker-compose.yml +++ /dev/null @@ -1,11 +0,0 @@ -version: '3' -services: - smb-fixture: - build: - context: . - dockerfile: Dockerfile - ports: - - "389" - - "636" - - "3268" - - "3269" diff --git a/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java b/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java new file mode 100644 index 0000000000000..10f589e4e1df3 --- /dev/null +++ b/x-pack/test/smb-fixture/src/main/java/org/elasticsearch/test/fixtures/smb/SmbTestContainer.java @@ -0,0 +1,53 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0.
+ */ + +package org.elasticsearch.test.fixtures.smb; + +import org.elasticsearch.test.fixtures.testcontainers.DockerEnvironmentAwareTestContainer; +import org.testcontainers.images.builder.ImageFromDockerfile; + +public final class SmbTestContainer extends DockerEnvironmentAwareTestContainer { + + private static final String DOCKER_BASE_IMAGE = "ubuntu:16.04"; + public static final int AD_LDAP_PORT = 636; + public static final int AD_LDAP_GC_PORT = 3269; + + public SmbTestContainer() { + super( + new ImageFromDockerfile("es-smb-fixture").withDockerfileFromBuilder( + builder -> builder.from(DOCKER_BASE_IMAGE) + .run("apt-get update -qqy && apt-get install -qqy samba ldap-utils") + .copy("fixture/provision/installsmb.sh", "/fixture/provision/installsmb.sh") + .copy("fixture/certs/ca.key", "/fixture/certs/ca.key") + .copy("fixture/certs/ca.pem", "/fixture/certs/ca.pem") + .copy("fixture/certs/cert.pem", "/fixture/certs/cert.pem") + .copy("fixture/certs/key.pem", "/fixture/certs/key.pem") + .run("chmod +x /fixture/provision/installsmb.sh") + .run("/fixture/provision/installsmb.sh") + .cmd("service samba-ad-dc restart && sleep infinity") + .build() + ) + .withFileFromClasspath("fixture/provision/installsmb.sh", "/smb/provision/installsmb.sh") + .withFileFromClasspath("fixture/certs/ca.key", "/smb/certs/ca.key") + .withFileFromClasspath("fixture/certs/ca.pem", "/smb/certs/ca.pem") + .withFileFromClasspath("fixture/certs/cert.pem", "/smb/certs/cert.pem") + .withFileFromClasspath("fixture/certs/key.pem", "/smb/certs/key.pem") + ); + // addExposedPort(389); + // addExposedPort(3268); + addExposedPort(AD_LDAP_PORT); + addExposedPort(AD_LDAP_GC_PORT); + } + + public String getAdLdapUrl() { + return "ldaps://localhost:" + getMappedPort(AD_LDAP_PORT); + } + + public String getAdLdapGcUrl() { + return "ldaps://localhost:" + getMappedPort(AD_LDAP_GC_PORT); + } +} diff --git a/x-pack/test/smb-fixture/src/main/resources/certs/README.asciidoc b/x-pack/test/smb-fixture/src/main/resources/smb/certs/README.asciidoc similarity index 100% rename from x-pack/test/smb-fixture/src/main/resources/certs/README.asciidoc rename to x-pack/test/smb-fixture/src/main/resources/smb/certs/README.asciidoc diff --git a/x-pack/test/smb-fixture/src/main/resources/certs/ca.key b/x-pack/test/smb-fixture/src/main/resources/smb/certs/ca.key similarity index 100% rename from x-pack/test/smb-fixture/src/main/resources/certs/ca.key rename to x-pack/test/smb-fixture/src/main/resources/smb/certs/ca.key diff --git a/x-pack/test/smb-fixture/src/main/resources/certs/ca.pem b/x-pack/test/smb-fixture/src/main/resources/smb/certs/ca.pem similarity index 100% rename from x-pack/test/smb-fixture/src/main/resources/certs/ca.pem rename to x-pack/test/smb-fixture/src/main/resources/smb/certs/ca.pem diff --git a/x-pack/test/smb-fixture/src/main/resources/certs/cert.pem b/x-pack/test/smb-fixture/src/main/resources/smb/certs/cert.pem similarity index 100% rename from x-pack/test/smb-fixture/src/main/resources/certs/cert.pem rename to x-pack/test/smb-fixture/src/main/resources/smb/certs/cert.pem diff --git a/x-pack/test/smb-fixture/src/main/resources/certs/key.pem b/x-pack/test/smb-fixture/src/main/resources/smb/certs/key.pem similarity index 100% rename from x-pack/test/smb-fixture/src/main/resources/certs/key.pem rename to x-pack/test/smb-fixture/src/main/resources/smb/certs/key.pem diff --git a/x-pack/test/smb-fixture/src/main/resources/provision/installsmb.sh b/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh similarity index 
97% rename from x-pack/test/smb-fixture/src/main/resources/provision/installsmb.sh rename to x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh index 0bc86e96530bc..463238b9f50c2 100644 --- a/x-pack/test/smb-fixture/src/main/resources/provision/installsmb.sh +++ b/x-pack/test/smb-fixture/src/main/resources/smb/provision/installsmb.sh @@ -8,8 +8,7 @@ set -ex VDIR=/fixture -RESOURCES=$VDIR/src/main/resources -CERTS_DIR=$RESOURCES/certs +CERTS_DIR=$VDIR/certs SSL_DIR=/var/lib/samba/private/tls # install ssl certs