Skip to content

Commit

Permalink
Merge branch 'main' into esql_base_javadoc
Browse files Browse the repository at this point in the history
  • Loading branch information
nik9000 committed Dec 27, 2023
2 parents 6a491ea + eec1c28 commit 0eeee7c
Show file tree
Hide file tree
Showing 1,428 changed files with 34,838 additions and 12,641 deletions.
34 changes: 34 additions & 0 deletions .buildkite/packer_cache.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
#!/bin/bash
# Warm the Gradle dependency caches for every active branch listed in
# branches.json, so packer-built CI images start with hot caches.
#
# For each branch: clone it from the local git mirror, source the branch's
# Java-version properties (arch-specific on ARM), export the JAVA*_HOME
# variables the build expects, run `resolveAllDependencies`, then delete
# the checkout again.

# Fail fast: a broken clone, missing properties file, or failed resolve
# should abort instead of silently producing a half-warmed cache.
set -euo pipefail

ROOT_DIR=$(cd "$(dirname "$0")/.." && pwd)

# Read the branch names straight from jq (no useless `cat`) into an array
# without the word-splitting/globbing of an unquoted $(…) expansion.
mapfile -t branches < <(jq -r '.branches[].branch' "$ROOT_DIR/branches.json")

# Query the machine architecture once, not twice per loop iteration.
arch=$(uname -m)

for branch in "${branches[@]}"; do
  echo "Resolving dependencies for ${branch} branch"
  rm -rf "checkout/$branch"
  git clone /opt/git-mirrors/elastic-elasticsearch --branch "$branch" --single-branch "checkout/$branch"

  CHECKOUT_DIR=$(cd "./checkout/${branch}" && pwd)
  CI_DIR="$CHECKOUT_DIR/.ci"

  if [[ "$arch" == "arm64" || "$arch" == "aarch64" ]]; then
    ## On ARM we use a different properties file for setting java home
    ## Also, we don't bother attempting to resolve dependencies for the 6.8 branch
    source "$CI_DIR/java-versions-aarch64.properties"
    export JAVA16_HOME="$HOME/.java/jdk16"
  else
    source "$CI_DIR/java-versions.properties"
    ## We are caching BWC versions too, need these so we can build those
    export JAVA8_HOME="$HOME/.java/java8"
    export JAVA11_HOME="$HOME/.java/java11"
    export JAVA12_HOME="$HOME/.java/openjdk12"
    export JAVA13_HOME="$HOME/.java/openjdk13"
    export JAVA14_HOME="$HOME/.java/openjdk14"
    export JAVA15_HOME="$HOME/.java/openjdk15"
    export JAVA16_HOME="$HOME/.java/openjdk16"
  fi

  # ES_BUILD_JAVA is set by the sourced properties file above; `set -u`
  # makes its absence a hard error instead of an empty JAVA_HOME.
  export JAVA_HOME="$HOME/.java/$ES_BUILD_JAVA"
  "checkout/${branch}/gradlew" --project-dir "$CHECKOUT_DIR" --parallel -s resolveAllDependencies -Dorg.gradle.warning.mode=none -DisCI
  rm -rf "checkout/${branch}"
done
2 changes: 1 addition & 1 deletion .buildkite/pipelines/intake.yml
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,7 @@ steps:
timeout_in_minutes: 300
matrix:
setup:
BWC_VERSION: ["7.17.16", "8.11.3", "8.12.0", "8.13.0"]
BWC_VERSION: ["7.17.17", "8.11.4", "8.12.0", "8.13.0"]
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
Expand Down
32 changes: 32 additions & 0 deletions .buildkite/pipelines/periodic-packaging.yml
Original file line number Diff line number Diff line change
Expand Up @@ -1089,6 +1089,22 @@ steps:
env:
BWC_VERSION: 7.17.16

- label: "{{matrix.image}} / 7.17.17 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v7.17.17
timeout_in_minutes: 300
matrix:
setup:
image:
- rocky-8
- ubuntu-2004
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 7.17.17

- label: "{{matrix.image}} / 8.0.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.0.0
timeout_in_minutes: 300
Expand Down Expand Up @@ -1761,6 +1777,22 @@ steps:
env:
BWC_VERSION: 8.11.3

- label: "{{matrix.image}} / 8.11.4 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.11.4
timeout_in_minutes: 300
matrix:
setup:
image:
- rocky-8
- ubuntu-2004
agents:
provider: gcp
image: family/elasticsearch-{{matrix.image}}
machineType: custom-16-32768
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.11.4

- label: "{{matrix.image}} / 8.12.0 / packaging-tests-upgrade"
command: ./.ci/scripts/packaging-test.sh -Dbwc.checkout.align=true destructiveDistroUpgradeTest.v8.12.0
timeout_in_minutes: 300
Expand Down
16 changes: 16 additions & 0 deletions .buildkite/pipelines/periodic-platform-support.yml
Original file line number Diff line number Diff line change
Expand Up @@ -80,3 +80,19 @@ steps:
diskName: /dev/sda1
env:
GRADLE_TASK: "{{matrix.GRADLE_TASK}}"
- group: platform-support-unix-aws
steps:
- label: "{{matrix.image}} / platform-support-aws"
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true functionalTests
timeout_in_minutes: 420
matrix:
setup:
image:
- amazonlinux-2023
agents:
provider: aws
imagePrefix: elasticsearch-{{matrix.image}}
instanceType: m6a.8xlarge
diskSizeGb: 350
diskType: gp3
diskName: /dev/sda1
20 changes: 20 additions & 0 deletions .buildkite/pipelines/periodic.yml
Original file line number Diff line number Diff line change
Expand Up @@ -662,6 +662,16 @@ steps:
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 7.17.16
- label: 7.17.17 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v7.17.17#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 7.17.17
- label: 8.0.0 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.0.0#bwcTest
timeout_in_minutes: 300
Expand Down Expand Up @@ -1082,6 +1092,16 @@ steps:
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.11.3
- label: 8.11.4 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.11.4#bwcTest
timeout_in_minutes: 300
agents:
provider: gcp
image: family/elasticsearch-ubuntu-2004
machineType: n1-standard-32
buildDirectory: /dev/shm/bk
env:
BWC_VERSION: 8.11.4
- label: 8.12.0 / bwc
command: .ci/scripts/run-gradle.sh -Dbwc.checkout.align=true v8.12.0#bwcTest
timeout_in_minutes: 300
Expand Down
15 changes: 15 additions & 0 deletions .buildkite/scripts/periodic.trigger.sh
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,26 @@ echo "steps:"

source .buildkite/scripts/branches.sh

IS_FIRST=true
SKIP_DELAY="${SKIP_DELAY:-false}"

for BRANCH in "${BRANCHES[@]}"; do
INTAKE_PIPELINE_SLUG="elasticsearch-intake"
BUILD_JSON=$(curl -sH "Authorization: Bearer ${BUILDKITE_API_TOKEN}" "https://api.buildkite.com/v2/organizations/elastic/pipelines/${INTAKE_PIPELINE_SLUG}/builds?branch=${BRANCH}&state=passed&per_page=1" | jq '.[0] | {commit: .commit, url: .web_url}')
LAST_GOOD_COMMIT=$(echo "${BUILD_JSON}" | jq -r '.commit')

# Put a delay between each branch's set of pipelines by prepending each non-first branch with a sleep
# This is to smooth out the spike in agent requests
if [[ "$IS_FIRST" != "true" && "$SKIP_DELAY" != "true" ]]; then
cat <<EOF
- command: sleep 540
soft_fail: true
- wait: ~
continue_on_failure: true
EOF
fi
IS_FIRST=false

cat <<EOF
- trigger: elasticsearch-periodic
label: Trigger periodic pipeline for $BRANCH
Expand Down
2 changes: 2 additions & 0 deletions .ci/bwcVersions
Original file line number Diff line number Diff line change
Expand Up @@ -65,6 +65,7 @@ BWC_VERSION:
- "7.17.14"
- "7.17.15"
- "7.17.16"
- "7.17.17"
- "8.0.0"
- "8.0.1"
- "8.1.0"
Expand Down Expand Up @@ -107,5 +108,6 @@ BWC_VERSION:
- "8.11.1"
- "8.11.2"
- "8.11.3"
- "8.11.4"
- "8.12.0"
- "8.13.0"
4 changes: 2 additions & 2 deletions .ci/snapshotBwcVersions
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
BWC_VERSION:
- "7.17.16"
- "8.11.3"
- "7.17.17"
- "8.11.4"
- "8.12.0"
- "8.13.0"
18 changes: 16 additions & 2 deletions .github/CODEOWNERS
Original file line number Diff line number Diff line change
Expand Up @@ -23,5 +23,19 @@ x-pack/plugin/core/src/main/resources/fleet-* @elastic/fleet
# Kibana Security
x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/KibanaOwnedReservedRoleDescriptors.java @elastic/kibana-security

# APM
x-pack/plugin/apm-data @elastic/apm-server
# APM Data index templates, etc.
x-pack/plugin/apm-data/src/main/resources @elastic/apm-server
x-pack/plugin/apm-data/src/yamlRestTest/resources @elastic/apm-server

# Delivery
gradle @elastic/es-delivery
build-conventions @elastic/es-delivery
build-tools @elastic/es-delivery
build-tools-internal @elastic/es-delivery
*.gradle @elastic/es-delivery
.buildkite @elastic/es-delivery
.ci @elastic/es-delivery
.idea @elastic/es-delivery
distribution/src @elastic/es-delivery
distribution/packages/src @elastic/es-delivery
distribution/docker/src @elastic/es-delivery
Original file line number Diff line number Diff line change
Expand Up @@ -27,11 +27,9 @@
import org.elasticsearch.compute.data.BlockFactory;
import org.elasticsearch.compute.data.BooleanBlock;
import org.elasticsearch.compute.data.BytesRefBlock;
import org.elasticsearch.compute.data.DoubleArrayVector;
import org.elasticsearch.compute.data.DoubleBlock;
import org.elasticsearch.compute.data.ElementType;
import org.elasticsearch.compute.data.IntBlock;
import org.elasticsearch.compute.data.LongArrayVector;
import org.elasticsearch.compute.data.LongBlock;
import org.elasticsearch.compute.data.Page;
import org.elasticsearch.compute.operator.AggregationOperator;
Expand Down Expand Up @@ -67,6 +65,10 @@ public class AggregatorBenchmark {
private static final int GROUPS = 5;

private static final BigArrays BIG_ARRAYS = BigArrays.NON_RECYCLING_INSTANCE; // TODO real big arrays?
private static final BlockFactory blockFactory = BlockFactory.getInstance(
new NoopCircuitBreaker("noop"),
BigArrays.NON_RECYCLING_INSTANCE
);

private static final String LONGS = "longs";
private static final String INTS = "ints";
Expand Down Expand Up @@ -116,8 +118,7 @@ public class AggregatorBenchmark {
@Param({ VECTOR_LONGS, HALF_NULL_LONGS, VECTOR_DOUBLES, HALF_NULL_DOUBLES })
public String blockType;

private static Operator operator(String grouping, String op, String dataType) {
DriverContext driverContext = driverContext();
private static Operator operator(DriverContext driverContext, String grouping, String op, String dataType) {
if (grouping.equals("none")) {
return new AggregationOperator(
List.of(supplier(op, dataType, 0).aggregatorFactory(AggregatorMode.SINGLE).apply(driverContext)),
Expand Down Expand Up @@ -432,24 +433,24 @@ private static void checkUngrouped(String prefix, String op, String dataType, Pa
}
}

private static Page page(String grouping, String blockType) {
Block dataBlock = dataBlock(blockType);
private static Page page(BlockFactory blockFactory, String grouping, String blockType) {
Block dataBlock = dataBlock(blockFactory, blockType);
if (grouping.equals("none")) {
return new Page(dataBlock);
}
List<Block> blocks = groupingBlocks(grouping, blockType);
return new Page(Stream.concat(blocks.stream(), Stream.of(dataBlock)).toArray(Block[]::new));
}

private static Block dataBlock(String blockType) {
private static Block dataBlock(BlockFactory blockFactory, String blockType) {
return switch (blockType) {
case VECTOR_LONGS -> new LongArrayVector(LongStream.range(0, BLOCK_LENGTH).toArray(), BLOCK_LENGTH).asBlock();
case VECTOR_DOUBLES -> new DoubleArrayVector(
case VECTOR_LONGS -> blockFactory.newLongArrayVector(LongStream.range(0, BLOCK_LENGTH).toArray(), BLOCK_LENGTH).asBlock();
case VECTOR_DOUBLES -> blockFactory.newDoubleArrayVector(
LongStream.range(0, BLOCK_LENGTH).mapToDouble(l -> Long.valueOf(l).doubleValue()).toArray(),
BLOCK_LENGTH
).asBlock();
case MULTIVALUED_LONGS -> {
var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
builder.beginPositionEntry();
for (int i = 0; i < BLOCK_LENGTH; i++) {
builder.appendLong(i);
Expand All @@ -462,15 +463,15 @@ private static Block dataBlock(String blockType) {
yield builder.build();
}
case HALF_NULL_LONGS -> {
var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
builder.appendLong(i);
builder.appendNull();
}
yield builder.build();
}
case HALF_NULL_DOUBLES -> {
var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
builder.appendDouble(i);
builder.appendNull();
Expand Down Expand Up @@ -502,7 +503,7 @@ private static Block groupingBlock(String grouping, String blockType) {
};
return switch (grouping) {
case LONGS -> {
var builder = LongBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newLongBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {
builder.appendLong(i % GROUPS);
Expand All @@ -511,7 +512,7 @@ private static Block groupingBlock(String grouping, String blockType) {
yield builder.build();
}
case INTS -> {
var builder = IntBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newIntBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {
builder.appendInt(i % GROUPS);
Expand All @@ -520,7 +521,7 @@ private static Block groupingBlock(String grouping, String blockType) {
yield builder.build();
}
case DOUBLES -> {
var builder = DoubleBlock.newBlockBuilder(BLOCK_LENGTH);
var builder = blockFactory.newDoubleBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {
builder.appendDouble(i % GROUPS);
Expand All @@ -529,7 +530,7 @@ private static Block groupingBlock(String grouping, String blockType) {
yield builder.build();
}
case BOOLEANS -> {
BooleanBlock.Builder builder = BooleanBlock.newBlockBuilder(BLOCK_LENGTH);
BooleanBlock.Builder builder = blockFactory.newBooleanBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {
builder.appendBoolean(i % 2 == 1);
Expand All @@ -538,7 +539,7 @@ private static Block groupingBlock(String grouping, String blockType) {
yield builder.build();
}
case BYTES_REFS -> {
BytesRefBlock.Builder builder = BytesRefBlock.newBlockBuilder(BLOCK_LENGTH);
BytesRefBlock.Builder builder = blockFactory.newBytesRefBlockBuilder(BLOCK_LENGTH);
for (int i = 0; i < BLOCK_LENGTH; i++) {
for (int v = 0; v < valuesPerGroup; v++) {
builder.appendBytesRef(bytesGroup(i % GROUPS));
Expand Down Expand Up @@ -574,8 +575,9 @@ private static void run(String grouping, String op, String blockType, int opCoun
default -> throw new IllegalArgumentException();
};

Operator operator = operator(grouping, op, dataType);
Page page = page(grouping, blockType);
DriverContext driverContext = driverContext();
Operator operator = operator(driverContext, grouping, op, dataType);
Page page = page(driverContext.blockFactory(), grouping, blockType);
for (int i = 0; i < opCount; i++) {
operator.addInput(page);
}
Expand All @@ -584,9 +586,6 @@ private static void run(String grouping, String op, String blockType, int opCoun
}

static DriverContext driverContext() {
return new DriverContext(
BigArrays.NON_RECYCLING_INSTANCE,
BlockFactory.getInstance(new NoopCircuitBreaker("noop"), BigArrays.NON_RECYCLING_INSTANCE)
);
return new DriverContext(BigArrays.NON_RECYCLING_INSTANCE, blockFactory);
}
}
Loading

0 comments on commit 0eeee7c

Please sign in to comment.