From 1c7d17c295ee1db6c0fa872a50aa93d04ad243a5 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Mon, 24 Feb 2025 17:48:07 -0800 Subject: [PATCH 01/20] Remove unnecessary temp dir access (#123314) All modules have read/write access to the temp dir. This commit removes unnecessarily adding the temp dir explicitly to the server policy. --- .../initialization/EntitlementInitialization.java | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 96bf8d3ba6ab9..b4dd40001afc2 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -176,7 +176,6 @@ private static PolicyManager createPolicyManager() { new FilesEntitlement( List.of( // Base ES directories - FileData.ofPath(bootstrapArgs.tempDir(), READ_WRITE), FileData.ofPath(bootstrapArgs.configDir(), READ), FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE), @@ -211,11 +210,7 @@ private static PolicyManager createPolicyManager() { new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement(), new FilesEntitlement( - List.of( - FileData.ofPath(bootstrapArgs.configDir(), READ), - FileData.ofPath(bootstrapArgs.tempDir(), READ), - FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE) - ) + List.of(FileData.ofPath(bootstrapArgs.configDir(), READ), FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE)) ) ) ), From 688542b515f290c42b75dfeffdfd89b6fee50a10 Mon Sep 17 00:00:00 2001 From: Stanislav Malyshev Date: Mon, 24 Feb 2025 21:48:18 -0700 Subject: [PATCH 02/20] Increase await timeout for testCancelFailedSearchWhenPartialResultDisallowed (#123084) --- muted-tests.yml | 3 --- .../java/org/elasticsearch/search/SearchCancellationIT.java | 2 +- 2 files changed, 1 insertion(+), 4 deletions(-) diff --git a/muted-tests.yml b/muted-tests.yml index a6a5e1a1f8fad..c5a962741eb0f 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -257,9 +257,6 @@ tests: - class: org.elasticsearch.test.rest.ClientYamlTestSuiteIT method: test {yaml=snapshot.delete/10_basic/Delete a snapshot asynchronously} issue: https://github.com/elastic/elasticsearch/issues/122102 -- class: org.elasticsearch.search.SearchCancellationIT - method: testCancelFailedSearchWhenPartialResultDisallowed - issue: https://github.com/elastic/elasticsearch/issues/121719 - class: org.elasticsearch.datastreams.TSDBPassthroughIndexingIT issue: https://github.com/elastic/elasticsearch/issues/121716 - class: org.elasticsearch.smoketest.SmokeTestMonitoringWithSecurityIT diff --git a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java index 0cc1c89b36d19..8701c88f9d41d 100644 --- a/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java +++ b/server/src/internalClusterTest/java/org/elasticsearch/search/SearchCancellationIT.java @@ -267,7 +267,7 @@ public void testCancelFailedSearchWhenPartialResultDisallowed() throws Exception if (letOneShardProceed.compareAndSet(false, true)) { // Let one shard continue. } else { - safeAwait(shardTaskLatch); // Block the other shards. 
+ safeAwait(shardTaskLatch, TimeValue.timeValueSeconds(30)); // Block the other shards. } }); } From 4a6343ad0b8d9212ad0a5606b546c075ff33bdb9 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 25 Feb 2025 07:39:14 +0000 Subject: [PATCH 03/20] Update minimum transport version in `main` (#123254) Versions 9.1.0 onwards will not be wire-compatible with versions before 8.19.0. This commit sets the minimum transport version to reject handshakes from earlier versions. --- server/src/main/java/org/elasticsearch/TransportVersions.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/elasticsearch/TransportVersions.java b/server/src/main/java/org/elasticsearch/TransportVersions.java index 35cd890af7fc9..05afa77e4391c 100644 --- a/server/src/main/java/org/elasticsearch/TransportVersions.java +++ b/server/src/main/java/org/elasticsearch/TransportVersions.java @@ -264,7 +264,7 @@ static TransportVersion def(int id) { * Reference to the earliest compatible transport version to this version of the codebase. * This should be the transport version used by the highest minor version of the previous major. */ - public static final TransportVersion MINIMUM_COMPATIBLE = BYTE_SIZE_VALUE_ALWAYS_USES_BYTES_1; + public static final TransportVersion MINIMUM_COMPATIBLE = INITIAL_ELASTICSEARCH_8_19; /** * Reference to the minimum transport version that can be used with CCS. From 5b0591e04a03d776031d1b4391e56e0332b7f362 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 25 Feb 2025 00:05:36 -0800 Subject: [PATCH 04/20] Consider entitlement lib as system module (#123315) * Consider entitlement lib as system module Entitlements sometimes needs to perform sensitive operations, particularly within the FileAccessTree. This commit expands the trivially allowed check to include entitlements as one of the system modules alongside the jdk. One consequence is that the self test must be moved outside entitlements. 
* [CI] Auto commit changes from spotless * remove old method call --------- Co-authored-by: elasticsearchmachine Co-authored-by: Elastic Machine --- .../bootstrap/EntitlementBootstrap.java | 49 ------------------- .../runtime/policy/PolicyManager.java | 16 +++--- .../bootstrap/Elasticsearch.java | 32 ++++++++++++ 3 files changed, 38 insertions(+), 59 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 4f37362d9325a..8610d9f3be66f 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -14,16 +14,13 @@ import com.sun.tools.attach.AttachNotSupportedException; import com.sun.tools.attach.VirtualMachine; -import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.initialization.EntitlementInitialization; -import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.logging.LogManager; import org.elasticsearch.logging.Logger; import java.io.IOException; -import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; @@ -114,7 +111,6 @@ public static void bootstrap( ); exportInitializationToAgent(); loadAgent(findAgentJar()); - selfTest(); } @SuppressForbidden(reason = "The VirtualMachine API is the only way to attach a java agent dynamically") @@ -160,50 +156,5 @@ private static String findAgentJar() { } } - /** - * Attempt a few sensitive operations to ensure that some are permitted and some are forbidden. - *

-     * This serves two purposes:
-     * <ol>
-     *     <li>a smoke test to make sure the entitlements system is not completely broken, and</li>
-     *     <li>an early test of certain important operations so they don't fail later on at an awkward time.</li>
-     * </ol>
- * - * @throws IllegalStateException if the entitlements system can't prevent an unauthorized action of our choosing - */ - private static void selfTest() { - ensureCannotStartProcess(ProcessBuilder::start); - // Try again with reflection - ensureCannotStartProcess(EntitlementBootstrap::reflectiveStartProcess); - } - - private static void ensureCannotStartProcess(CheckedConsumer startProcess) { - try { - // The command doesn't matter; it doesn't even need to exist - startProcess.accept(new ProcessBuilder("")); - } catch (NotEntitledException e) { - logger.debug("Success: Entitlement protection correctly prevented process creation"); - return; - } catch (Exception e) { - throw new IllegalStateException("Failed entitlement protection self-test", e); - } - throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); - } - - private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { - try { - var start = ProcessBuilder.class.getMethod("start"); - start.invoke(pb); - } catch (InvocationTargetException e) { - throw (Exception) e.getCause(); - } - } - private static final Logger logger = LogManager.getLogger(EntitlementBootstrap.class); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index 2aafcfc594abd..b6296fe5d4713 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -12,7 +12,6 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; -import org.elasticsearch.entitlement.bridge.EntitlementChecker; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; @@ -126,11 +125,12 @@ private static Set findSystemModules() { .stream() .map(ModuleReference::descriptor) .collect(Collectors.toUnmodifiableSet()); - return ModuleLayer.boot() - .modules() - .stream() - .filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) - .collect(Collectors.toUnmodifiableSet()); + return Stream.concat( + // entitlements is a "system" module, we can do anything from it + Stream.of(PolicyManager.class.getModule()), + // anything in the boot layer is also part of the system + ModuleLayer.boot().modules().stream().filter(m -> systemModulesDescriptors.contains(m.getDescriptor())) + ).collect(Collectors.toUnmodifiableSet()); } /** @@ -564,10 +564,6 @@ private static boolean isTriviallyAllowed(Class requestingClass) { logger.debug("Entitlement trivially allowed from system module [{}]", requestingClass.getModule().getName()); return true; } - if (EntitlementChecker.class.isAssignableFrom(requestingClass)) { - logger.debug("Entitlement trivially allowed for EntitlementChecker class"); - return true; - } logger.trace("Entitlement not trivially allowed"); return false; } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 3b67b86eff015..37dfd46db581a 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ 
b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -29,9 +29,11 @@ import org.elasticsearch.common.util.concurrent.RunOnce; import org.elasticsearch.core.AbstractRefCounted; import org.elasticsearch.core.Booleans; +import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.IOUtils; import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; +import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.policy.Policy; import org.elasticsearch.entitlement.runtime.policy.PolicyParserUtils; import org.elasticsearch.entitlement.runtime.policy.entitlements.LoadNativeLibrariesEntitlement; @@ -54,6 +56,7 @@ import java.io.InputStream; import java.io.PrintStream; import java.lang.invoke.MethodHandles; +import java.lang.reflect.InvocationTargetException; import java.nio.file.Files; import java.nio.file.Path; import java.security.Permission; @@ -254,6 +257,7 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { nodeEnv.logsDir(), nodeEnv.tmpDir() ); + entitlementSelfTest(); } else { assert RuntimeVersionFeature.isSecurityManagerAvailable(); // no need to explicitly enable native access for legacy code @@ -270,6 +274,34 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { bootstrap.setPluginsLoader(pluginsLoader); } + // check entitlements were loaded correctly. note this must be outside the entitlements lib. + private static void entitlementSelfTest() { + ensureCannotStartProcess(ProcessBuilder::start); + // Try again with reflection + ensureCannotStartProcess(Elasticsearch::reflectiveStartProcess); + } + + private static void ensureCannotStartProcess(CheckedConsumer startProcess) { + try { + // The command doesn't matter; it doesn't even need to exist + startProcess.accept(new ProcessBuilder("")); + } catch (NotEntitledException e) { + return; + } catch (Exception e) { + throw new IllegalStateException("Failed entitlement protection self-test", e); + } + throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); + } + + private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { + try { + var start = ProcessBuilder.class.getMethod("start"); + start.invoke(pb); + } catch (InvocationTargetException e) { + throw (Exception) e.getCause(); + } + } + private static void ensureInitialized(Class... 
classes) { for (final var clazz : classes) { try { From 4bd1f81ef90a283a9271f8d079ecf2196b541ca9 Mon Sep 17 00:00:00 2001 From: Rene Groeschke Date: Tue, 25 Feb 2025 09:30:42 +0100 Subject: [PATCH 05/20] Move eclipse specific extention property into elasticsearch eclipse plugin (#123320) --- .../internal/conventions/EclipseConventionPlugin.java | 10 ++++++++++ build.gradle | 9 --------- 2 files changed, 10 insertions(+), 9 deletions(-) diff --git a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/EclipseConventionPlugin.java b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/EclipseConventionPlugin.java index 58b183fac3155..48465cb08cc79 100644 --- a/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/EclipseConventionPlugin.java +++ b/build-conventions/src/main/java/org/elasticsearch/gradle/internal/conventions/EclipseConventionPlugin.java @@ -15,6 +15,7 @@ import org.gradle.api.Plugin; import org.gradle.api.Project; import org.gradle.api.Transformer; +import org.gradle.api.invocation.Gradle; import org.gradle.api.plugins.JavaBasePlugin; import org.gradle.api.plugins.JavaPluginExtension; import org.gradle.api.tasks.Copy; @@ -38,6 +39,15 @@ public class EclipseConventionPlugin implements Plugin { @Override public void apply(Project project) { project.getPlugins().apply(EclipsePlugin.class); + Gradle gradle = project.getGradle(); + + boolean isEclipse = project.getProviders().systemProperty("eclipse.launcher").isPresent() || // Gradle launched from Eclipse + project.getProviders().systemProperty("eclipse.application").isPresent() || // Gradle launched from the Eclipse compiler server + gradle.getStartParameter().getTaskNames().contains("eclipse") || // Gradle launched from the command line to do eclipse stuff + gradle.getStartParameter().getTaskNames().contains("cleanEclipse"); + // for eclipse ide specific hacks... + project.getExtensions().add("isEclipse", isEclipse); + EclipseModel eclipseModel = project.getExtensions().getByType(EclipseModel.class); EclipseProject eclipseProject = eclipseModel.getProject(); diff --git a/build.gradle b/build.gradle index 440032675213a..1f8f11fe05110 100644 --- a/build.gradle +++ b/build.gradle @@ -247,15 +247,6 @@ allprojects { } } - // injecting groovy property variables into all projects - project.ext { - // for ide hacks... 
- isEclipse = providers.systemProperty("eclipse.launcher").isPresent() || // Detects gradle launched from Eclipse's IDE - providers.systemProperty("eclipse.application").isPresent() || // Detects gradle launched from the Eclipse compiler server - gradle.startParameter.taskNames.contains('eclipse') || // Detects gradle launched from the command line to do eclipse stuff - gradle.startParameter.taskNames.contains('cleanEclipse') - } - ext.bwc_tests_enabled = bwc_tests_enabled // eclipse configuration From 9aaf07242c0e76dcb4739a1d3f8b5a9f26b2e55c Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 25 Feb 2025 01:18:56 -0800 Subject: [PATCH 06/20] Add pidfile access for server (#123313) --- .../bootstrap/EntitlementBootstrap.java | 10 +++- .../EntitlementInitialization.java | 58 ++++++++++--------- .../bootstrap/Elasticsearch.java | 3 +- 3 files changed, 40 insertions(+), 31 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 8610d9f3be66f..06e985913c9b4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -41,7 +41,8 @@ public record BootstrapArgs( Path configDir, Path libDir, Path logsDir, - Path tempDir + Path tempDir, + Path pidFile ) { public BootstrapArgs { requireNonNull(pluginPolicies); @@ -80,6 +81,7 @@ public static BootstrapArgs bootstrapArgs() { * @param libDir the lib directory for Elasticsearch * @param tempDir the temp directory for Elasticsearch * @param logsDir the log directory for Elasticsearch + * @param pidFile path to a pid file for Elasticsearch, or {@code null} if one was not specified */ public static void bootstrap( Map pluginPolicies, @@ -91,7 +93,8 @@ public static void bootstrap( Path configDir, Path libDir, Path logsDir, - Path tempDir + Path tempDir, + Path pidFile ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { @@ -107,7 +110,8 @@ public static void bootstrap( configDir, libDir, logsDir, - tempDir + tempDir, + pidFile ); exportInitializationToAgent(); loadAgent(findAgentJar()); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index b4dd40001afc2..e1749eb2a4bb3 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -148,6 +148,36 @@ private static PolicyManager createPolicyManager() { ); List serverScopes = new ArrayList<>(); + List serverModuleFileDatas = new ArrayList<>(); + Collections.addAll( + serverModuleFileDatas, + // Base ES directories + FileData.ofPath(bootstrapArgs.configDir(), READ), + FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), + FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE), + FileData.ofRelativePath(Path.of(""), SHARED_REPO, READ_WRITE), + + // OS release on Linux + FileData.ofPath(Path.of("/etc/os-release"), READ).withPlatform(LINUX), + FileData.ofPath(Path.of("/etc/system-release"), READ).withPlatform(LINUX), + FileData.ofPath(Path.of("/usr/lib/os-release"), READ).withPlatform(LINUX), + // 
read max virtual memory areas + FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ).withPlatform(LINUX), + FileData.ofPath(Path.of("/proc/meminfo"), READ).withPlatform(LINUX), + // load averages on Linux + FileData.ofPath(Path.of("/proc/loadavg"), READ).withPlatform(LINUX), + // control group stats on Linux. cgroup v2 stats are in an unpredicable + // location under `/sys/fs/cgroup`, so unfortunately we have to allow + // read access to the entire directory hierarchy. + FileData.ofPath(Path.of("/proc/self/cgroup"), READ).withPlatform(LINUX), + FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ).withPlatform(LINUX), + // // io stats on Linux + FileData.ofPath(Path.of("/proc/self/mountinfo"), READ).withPlatform(LINUX), + FileData.ofPath(Path.of("/proc/diskstats"), READ).withPlatform(LINUX) + ); + if (bootstrapArgs.pidFile() != null) { + serverModuleFileDatas.add(FileData.ofPath(bootstrapArgs.pidFile(), READ_WRITE)); + } Collections.addAll( serverScopes, new Scope( @@ -173,33 +203,7 @@ private static PolicyManager createPolicyManager() { new OutboundNetworkEntitlement(), new LoadNativeLibrariesEntitlement(), new ManageThreadsEntitlement(), - new FilesEntitlement( - List.of( - // Base ES directories - FileData.ofPath(bootstrapArgs.configDir(), READ), - FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE), - FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE), - FileData.ofRelativePath(Path.of(""), SHARED_REPO, READ_WRITE), - - // OS release on Linux - FileData.ofPath(Path.of("/etc/os-release"), READ).withPlatform(LINUX), - FileData.ofPath(Path.of("/etc/system-release"), READ).withPlatform(LINUX), - FileData.ofPath(Path.of("/usr/lib/os-release"), READ).withPlatform(LINUX), - // read max virtual memory areas - FileData.ofPath(Path.of("/proc/sys/vm/max_map_count"), READ).withPlatform(LINUX), - FileData.ofPath(Path.of("/proc/meminfo"), READ).withPlatform(LINUX), - // load averages on Linux - FileData.ofPath(Path.of("/proc/loadavg"), READ).withPlatform(LINUX), - // control group stats on Linux. cgroup v2 stats are in an unpredicable - // location under `/sys/fs/cgroup`, so unfortunately we have to allow - // read access to the entire directory hierarchy. 
- FileData.ofPath(Path.of("/proc/self/cgroup"), READ).withPlatform(LINUX), - FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ).withPlatform(LINUX), - // // io stats on Linux - FileData.ofPath(Path.of("/proc/self/mountinfo"), READ).withPlatform(LINUX), - FileData.ofPath(Path.of("/proc/diskstats"), READ).withPlatform(LINUX) - ) - ) + new FilesEntitlement(serverModuleFileDatas) ) ), new Scope("org.apache.httpcomponents.httpclient", List.of(new OutboundNetworkEntitlement())), diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 37dfd46db581a..05072ae940ae4 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -255,7 +255,8 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { nodeEnv.configDir(), nodeEnv.libDir(), nodeEnv.logsDir(), - nodeEnv.tmpDir() + nodeEnv.tmpDir(), + args.pidFile() ); entitlementSelfTest(); } else { From 12fcdd8633f03159a8f1505d2f828ed0bba06709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 25 Feb 2025 10:19:15 +0100 Subject: [PATCH 07/20] Some missing entitlements preventing serverless to start (#123271) --- .../entitlement/initialization/EntitlementInitialization.java | 4 +++- .../security/src/main/plugin-metadata/entitlement-policy.yaml | 3 +++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index e1749eb2a4bb3..0fef2c27df663 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -255,7 +255,9 @@ private static PolicyManager createPolicyManager() { new FilesEntitlement( List.of( FileData.ofPath(Path.of("/co/elastic/apm/agent/"), READ), - FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ) + FileData.ofPath(Path.of("/agent/co/elastic/apm/agent/"), READ), + FileData.ofPath(Path.of("/proc/meminfo"), READ), + FileData.ofPath(Path.of("/sys/fs/cgroup/"), READ) ) ) ); diff --git a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml index 99dd7d5c1380f..a6f29cb2ad7ea 100644 --- a/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml +++ b/x-pack/plugin/security/src/main/plugin-metadata/entitlement-policy.yaml @@ -37,3 +37,6 @@ org.opensaml.saml.impl: - relative_path: metadata.xml relative_to: config mode: read + - relative_path: "saml/" + relative_to: config + mode: read From ec82c24a8722e7c4b50392567bafef7d96051849 Mon Sep 17 00:00:00 2001 From: Craig Taverner Date: Tue, 25 Feb 2025 11:38:51 +0100 Subject: [PATCH 08/20] Add support to VALUES aggregation for spatial types (#122886) The original work at https://github.com/elastic/elasticsearch/pull/106065 did not support geospatial types with this comment: > I made this work for everything but geo_point and cartesian_point because I'm not 100% sure how to integrate with those. We can grab those in a follow up. 
The geospatial types should be possible to collect using the VALUES aggregation with similar behavior to the `ST_COLLECT` OGC function, based on the Elasticsearch convention that treats multi-value geospatial fields as behaving similarly to any geometry collection. So this implementation is a trivial addition to the existing values types support. --- docs/changelog/122886.yaml | 6 ++ .../functions/kibana/definition/values.json | 48 ++++++++++++++ x-pack/plugin/build.gradle | 2 + .../src/main/resources/spatial.csv-spec | 64 +++++++++++++++++++ .../xpack/esql/action/EsqlCapabilities.java | 5 ++ .../esql/expression/EsqlTypeResolutions.java | 11 ---- .../expression/function/aggregate/Values.java | 43 ++++++++++--- .../function/AbstractFunctionTestCase.java | 8 ++- .../function/aggregate/ValuesErrorTests.java | 2 +- .../function/aggregate/ValuesTests.java | 7 +- .../rest-api-spec/test/esql/130_spatial.yml | 29 +++++---- 11 files changed, 189 insertions(+), 36 deletions(-) create mode 100644 docs/changelog/122886.yaml diff --git a/docs/changelog/122886.yaml b/docs/changelog/122886.yaml new file mode 100644 index 0000000000000..7306a21d1470e --- /dev/null +++ b/docs/changelog/122886.yaml @@ -0,0 +1,6 @@ +pr: 122886 +summary: Add support to VALUES aggregation for spatial types +area: ES|QL +type: bug +issues: + - 122413 diff --git a/docs/reference/esql/functions/kibana/definition/values.json b/docs/reference/esql/functions/kibana/definition/values.json index 95ac402bb242a..0ac74c61cf73d 100644 --- a/docs/reference/esql/functions/kibana/definition/values.json +++ b/docs/reference/esql/functions/kibana/definition/values.json @@ -16,6 +16,30 @@ "variadic" : false, "returnType" : "boolean" }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "cartesian_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "cartesian_shape" + }, { "params" : [ { @@ -52,6 +76,30 @@ "variadic" : false, "returnType" : "double" }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_point", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_point" + }, + { + "params" : [ + { + "name" : "field", + "type" : "geo_shape", + "optional" : false, + "description" : "" + } + ], + "variadic" : false, + "returnType" : "geo_shape" + }, { "params" : [ { diff --git a/x-pack/plugin/build.gradle b/x-pack/plugin/build.gradle index d3052cb191a06..784eed9bf0fa6 100644 --- a/x-pack/plugin/build.gradle +++ b/x-pack/plugin/build.gradle @@ -102,6 +102,8 @@ tasks.named("yamlRestCompatTestTransform").configure({ task -> task.skipTest("esql/190_lookup_join/alias-pattern-multiple", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/190_lookup_join/alias-pattern-single", "LOOKUP JOIN does not support index aliases for now") task.skipTest("esql/180_match_operator/match with disjunctions", "Disjunctions in full text functions work now") + task.skipTest("esql/130_spatial/values unsupported for geo_point", "Spatial types are now supported in VALUES aggregation") + task.skipTest("esql/130_spatial/values unsupported for geo_point status code", "Spatial types are now supported in VALUES aggregation") // Expected deprecation warning to compat yaml tests: task.addAllowedWarningRegex(".*rollup functionality will be removed in Elasticsearch.*") 
task.skipTest("esql/40_tsdb/from doc with aggregate_metric_double", "TODO: support for subset of metric fields") diff --git a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec index 8718112979ce6..b5c588a1b486f 100644 --- a/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec +++ b/x-pack/plugin/esql/qa/testFixtures/src/main/resources/spatial.csv-spec @@ -144,6 +144,70 @@ c:long | x:double | y:double 19 | null | null ; +values +required_capability: agg_values_spatial + +FROM airports +| WHERE scalerank == 9 +| STATS locations=VALUES(location) +| EVAL locations = MV_SORT(TO_STRING(locations)) +; + +locations:keyword +[POINT (101.446569298441 0.464600872998505), POINT (105.176060419161 -5.242566777132), POINT (112.711418617258 -7.92998002840567), POINT (126.810839481226 35.1400051390198), POINT (127.495916124681 36.7220227766673), POINT (128.637537699933 35.8999277969087), POINT (129.355731047528 35.5928957527107), POINT (145.243980298582 14.1717712971216), POINT (35.3018728575279 47.8732635579023), POINT (42.97109630194 14.7552534413725), POINT (48.7471065435931 31.3431585560757), POINT (60.900708564915 29.4752941956573), POINT (61.5122589740201 55.2977919496055), POINT (63.0279333519181 25.988794590011), POINT (66.9487311480949 30.249043186181), POINT (72.9878190922305 31.3627435480862), POINT (73.0320498392002 33.5614146278861), POINT (73.3163595376585 54.9576482934059), POINT (73.4084964764375 61.3401672194481), POINT (73.8105674924689 19.9660205672806), POINT (75.3958432922005 19.8672969621082), POINT (75.7584828456005 31.4329422397715), POINT (75.8092915005895 22.727749187571), POINT (75.9330597710755 17.625415183635), POINT (75.9570722403652 30.8503598561702), POINT (76.8017261105242 30.6707248949667), POINT (78.2172186546348 26.285487697937), POINT (78.7089578747476 10.7603571306554), POINT (79.452002687657 28.4218087161144), POINT (81.7317271462187 25.443522027821), POINT (82.6671524525865 55.0095847136264), POINT (83.5504532124038 53.3633850813046), POINT (85.3235970368767 23.3177245989962)] +; + +valuesGrouped +required_capability: agg_values_spatial + +FROM airports +| WHERE scalerank == 9 +| EVAL first_letter = SUBSTRING(abbrev, 0, 1) +| STATS locations=VALUES(location) BY first_letter +| EVAL locations = MV_SORT(TO_STRING(locations)) +| SORT first_letter +| KEEP first_letter, locations +; + +first_letter:keyword | locations:keyword +A | POINT (48.7471065435931 31.3431585560757) +B | POINT (83.5504532124038 53.3633850813046) +C | [POINT (127.495916124681 36.7220227766673), POINT (61.5122589740201 55.2977919496055)] +G | POINT (78.2172186546348 26.285487697937) +H | POINT (42.97109630194 14.7552534413725) +I | [POINT (73.8105674924689 19.9660205672806), POINT (75.3958432922005 19.8672969621082), POINT (75.8092915005895 22.727749187571), POINT (76.8017261105242 30.6707248949667), POINT (81.7317271462187 25.443522027821), POINT (85.3235970368767 23.3177245989962)] +K | POINT (126.810839481226 35.1400051390198) +L | [POINT (72.9878190922305 31.3627435480862), POINT (75.9570722403652 30.8503598561702)] +M | POINT (112.711418617258 -7.92998002840567) +O | [POINT (35.3018728575279 47.8732635579023), POINT (73.0320498392002 33.5614146278861), POINT (73.3163595376585 54.9576482934059), POINT (82.6671524525865 55.0095847136264)] +P | POINT (101.446569298441 0.464600872998505) +R | POINT (145.243980298582 14.1717712971216) +S | [POINT (73.4084964764375 61.3401672194481), POINT 
(75.9330597710755 17.625415183635)] +T | [POINT (128.637537699933 35.8999277969087), POINT (63.0279333519181 25.988794590011), POINT (78.7089578747476 10.7603571306554)] +U | [POINT (129.355731047528 35.5928957527107), POINT (66.9487311480949 30.249043186181)] +V | [POINT (75.7584828456005 31.4329422397715), POINT (79.452002687657 28.4218087161144)] +W | POINT (105.176060419161 -5.242566777132) +Z | POINT (60.900708564915 29.4752941956573) +; + +valuesGroupedByOrdinals +required_capability: agg_values_spatial + +FROM airports +| WHERE scalerank == 9 +| STATS locations=VALUES(location) BY type +| EVAL locations = MV_SORT(TO_STRING(locations)) +| SORT type +| KEEP type, locations +; + +type:keyword | locations:keyword +major | [POINT (127.495916124681 36.7220227766673), POINT (76.8017261105242 30.6707248949667)] +mid | [POINT (101.446569298441 0.464600872998505), POINT (105.176060419161 -5.242566777132), POINT (112.711418617258 -7.92998002840567), POINT (126.810839481226 35.1400051390198), POINT (128.637537699933 35.8999277969087), POINT (129.355731047528 35.5928957527107), POINT (145.243980298582 14.1717712971216), POINT (35.3018728575279 47.8732635579023), POINT (42.97109630194 14.7552534413725), POINT (48.7471065435931 31.3431585560757), POINT (60.900708564915 29.4752941956573), POINT (61.5122589740201 55.2977919496055), POINT (63.0279333519181 25.988794590011), POINT (66.9487311480949 30.249043186181), POINT (72.9878190922305 31.3627435480862), POINT (73.3163595376585 54.9576482934059), POINT (73.4084964764375 61.3401672194481), POINT (73.8105674924689 19.9660205672806), POINT (75.3958432922005 19.8672969621082), POINT (75.7584828456005 31.4329422397715), POINT (75.8092915005895 22.727749187571), POINT (75.9330597710755 17.625415183635), POINT (78.2172186546348 26.285487697937), POINT (78.7089578747476 10.7603571306554), POINT (82.6671524525865 55.0095847136264), POINT (83.5504532124038 53.3633850813046), POINT (85.3235970368767 23.3177245989962)] +military | [POINT (112.711418617258 -7.92998002840567), POINT (126.810839481226 35.1400051390198), POINT (35.3018728575279 47.8732635579023), POINT (72.9878190922305 31.3627435480862), POINT (75.7584828456005 31.4329422397715), POINT (76.8017261105242 30.6707248949667), POINT (78.2172186546348 26.285487697937), POINT (79.452002687657 28.4218087161144), POINT (81.7317271462187 25.443522027821)] +small | [POINT (73.0320498392002 33.5614146278861), POINT (75.9570722403652 30.8503598561702)] +; + ############################################### # Tests for ST_CENTROID_AGG on GEO_POINT type diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java index 5f88f9f348276..06764454b1428 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/action/EsqlCapabilities.java @@ -81,6 +81,11 @@ public enum Cap { */ AGG_VALUES, + /** + * Expand the {@code VALUES} agg to cover spatial types. + */ + AGG_VALUES_SPATIAL, + /** * Does ESQL support async queries. 
*/ diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java index b97374d179a44..23f875e4a0595 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/EsqlTypeResolutions.java @@ -63,12 +63,6 @@ public static Expression.TypeResolution isExact(Expression e, String operationNa GEO_SHAPE.typeName(), CARTESIAN_SHAPE.typeName() }; private static final String[] POINT_TYPE_NAMES = new String[] { GEO_POINT.typeName(), CARTESIAN_POINT.typeName() }; - private static final String[] NON_SPATIAL_TYPE_NAMES = DataType.types() - .stream() - .filter(DataType::isRepresentable) - .filter(t -> DataType.isSpatial(t) == false) - .map(DataType::esType) - .toArray(String[]::new); public static Expression.TypeResolution isSpatialPoint(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return isType(e, DataType::isSpatialPoint, operationName, paramOrd, POINT_TYPE_NAMES); @@ -77,9 +71,4 @@ public static Expression.TypeResolution isSpatialPoint(Expression e, String oper public static Expression.TypeResolution isSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { return isType(e, DataType::isSpatial, operationName, paramOrd, SPATIAL_TYPE_NAMES); } - - public static Expression.TypeResolution isNotSpatial(Expression e, String operationName, TypeResolutions.ParamOrdinal paramOrd) { - return isType(e, t -> DataType.isSpatial(t) == false, operationName, paramOrd, NON_SPATIAL_TYPE_NAMES); - } - } diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java index 4dbe0e93b5017..6ffee21411acb 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/expression/function/aggregate/Values.java @@ -50,11 +50,28 @@ public class Values extends AggregateFunction implements ToAggregator { Map.entry(DataType.SEMANTIC_TEXT, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.IP, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.VERSION, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.GEO_POINT, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.CARTESIAN_POINT, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.GEO_SHAPE, ValuesBytesRefAggregatorFunctionSupplier::new), + Map.entry(DataType.CARTESIAN_SHAPE, ValuesBytesRefAggregatorFunctionSupplier::new), Map.entry(DataType.BOOLEAN, ValuesBooleanAggregatorFunctionSupplier::new) ); @FunctionInfo( - returnType = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "version" }, + returnType = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "date_nanos", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "version" }, preview = true, description = "Returns all values in a group as a multivalued field. The order of the returned values isn't guaranteed. 
" + "If you need the values returned in order use <>.", @@ -74,7 +91,21 @@ public Values( Source source, @Param( name = "field", - type = { "boolean", "date", "date_nanos", "double", "integer", "ip", "keyword", "long", "text", "version" } + type = { + "boolean", + "cartesian_point", + "cartesian_shape", + "date", + "date_nanos", + "double", + "geo_point", + "geo_shape", + "integer", + "ip", + "keyword", + "long", + "text", + "version" } ) Expression v ) { this(source, v, Literal.TRUE); @@ -115,13 +146,7 @@ public DataType dataType() { @Override protected TypeResolution resolveType() { - return TypeResolutions.isType( - field(), - SUPPLIERS::containsKey, - sourceText(), - DEFAULT, - "any type except unsigned_long and spatial types" - ); + return TypeResolutions.isType(field(), SUPPLIERS::containsKey, sourceText(), DEFAULT, "any type except unsigned_long"); } @Override diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java index 9ed140558a474..a6d8e3c2e057f 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/AbstractFunctionTestCase.java @@ -858,7 +858,9 @@ protected final void assertTypeResolutionFailure(Expression expression) { @AfterClass public static void renderSignature() throws IOException { - if (System.getProperty("generateDocs") == null) { + // Temporarily turn off docs generation during docs freeze + // TODO: Only turn this back on once this generates the correct MD files + if (System.getProperty("generateDocs") == null || true) { return; } String name = functionName(); @@ -933,7 +935,9 @@ public static void renderDocs() throws IOException { } protected static void renderDocs(String name) throws IOException { - if (System.getProperty("generateDocs") == null) { + // Temporarily turn off docs generation during docs freeze + // TODO: Only turn this back on once this generates the correct MD files + if (System.getProperty("generateDocs") == null || true) { return; } if (binaryOperator(name) != null || unaryOperator(name) != null || searchOperator(name) != null || likeOrInOperator(name)) { diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java index f9dafc954b6f5..d34fe1df29827 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesErrorTests.java @@ -32,6 +32,6 @@ protected Expression build(Source source, List args) { @Override protected Matcher expectedTypeErrorMatcher(List> validPerPosition, List signature) { - return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "any type except unsigned_long and spatial types")); + return equalTo(typeErrorMessage(false, validPerPosition, signature, (v, p) -> "any type except unsigned_long")); } } diff --git a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java 
index 80e6a7fc09d56..57795ee9270d1 100644 --- a/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java +++ b/x-pack/plugin/esql/src/test/java/org/elasticsearch/xpack/esql/expression/function/aggregate/ValuesTests.java @@ -52,7 +52,12 @@ public static Iterable parameters() { // Lower values for strings, as they take more space and may trigger the circuit breaker MultiRowTestCaseSupplier.stringCases(1, 20, DataType.KEYWORD), MultiRowTestCaseSupplier.stringCases(1, 20, DataType.TEXT), - MultiRowTestCaseSupplier.stringCases(1, 20, DataType.SEMANTIC_TEXT) + MultiRowTestCaseSupplier.stringCases(1, 20, DataType.SEMANTIC_TEXT), + // For spatial types, we can have many rows for points, but reduce rows for shapes to avoid circuit breaker + MultiRowTestCaseSupplier.geoPointCases(1, 1000, MultiRowTestCaseSupplier.IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianPointCases(1, 1000, MultiRowTestCaseSupplier.IncludingAltitude.NO), + MultiRowTestCaseSupplier.geoShapeCasesWithoutCircle(1, 100, MultiRowTestCaseSupplier.IncludingAltitude.NO), + MultiRowTestCaseSupplier.cartesianShapeCasesWithoutCircle(1, 100, MultiRowTestCaseSupplier.IncludingAltitude.NO) ).flatMap(List::stream).map(ValuesTests::makeSupplier).collect(Collectors.toCollection(() -> suppliers)); return parameterSuppliersFromTypedDataWithDefaultChecksNoErrors(suppliers, false); diff --git a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml index ad3adbf93adaf..e632620817283 100644 --- a/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml +++ b/x-pack/plugin/src/yamlRestTest/resources/rest-api-spec/test/esql/130_spatial.yml @@ -3,7 +3,7 @@ setup: - requires: cluster_features: ["gte_v8.14.0"] reason: "Mixed cluster tests don't work with the changed error message from sort" - test_runner_features: allowed_warnings_regex + test_runner_features: [ capabilities, allowed_warnings_regex ] - do: indices.create: @@ -148,20 +148,25 @@ geo_point unsortable with limit from row: query: 'ROW wkt = ["POINT(42.9711 -14.7553)", "POINT(75.8093 22.7277)"] | MV_EXPAND wkt | EVAL pt = TO_GEOPOINT(wkt) | limit 5 | sort pt' --- -values unsupported for geo_point: - - do: - catch: '/.+argument of \[VALUES\(location\)\] must be .+/' - esql.query: - body: - query: 'FROM geo_points | STATS VALUES(location)' - ---- -values unsupported for geo_point status code: +values supported for geo_point: + - requires: + capabilities: + - method: POST + path: /_query + parameters: [ method, path, parameters, capabilities ] + capabilities: [ agg_values_spatial ] + reason: "Spatial types added to values aggregation in 8.19.0" - do: - catch: bad_request + allowed_warnings_regex: + - "No limit defined, adding default limit of \\[.*\\]" esql.query: body: - query: 'FROM geo_points | STATS VALUES(location)' + query: 'FROM geo_points | STATS locations = VALUES(location) | EVAL locations = MV_SORT(TO_STRING(locations))' + - length: { columns: 1 } + - match: { columns.0.name: locations } + - match: { columns.0.type: keyword } + - length: { values: 1 } + - match: { values.0.0: ["POINT (-1.0 1.0)", "POINT (1.0 -1.0)"] } --- cartesian_point: From 4f6a0e6840042133f937b0ddfd160e97ac2b68a1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 25 Feb 2025 13:39:58 +0100 Subject: [PATCH 09/20] [Entitlements] Follows links during FileAccessTree creation (#123357) --- 
.../runtime/policy/FileAccessTree.java | 48 +++++++++++++++---- .../runtime/policy/FileAccessTreeTests.java | 44 +++++++++++++++++ 2 files changed, 84 insertions(+), 8 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java index d0eded74556b7..336a00643e979 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTree.java @@ -10,18 +10,27 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; +import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement.Mode; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; +import java.io.IOException; +import java.io.UncheckedIOException; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.List; import java.util.Objects; +import java.util.function.BiConsumer; import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; public final class FileAccessTree { + private static final Logger logger = LogManager.getLogger(FileAccessTree.class); private static final String FILE_SEPARATOR = getDefaultFileSystem().getSeparator(); private final String[] readPaths; @@ -30,6 +39,27 @@ public final class FileAccessTree { private FileAccessTree(FilesEntitlement filesEntitlement, PathLookup pathLookup) { List readPaths = new ArrayList<>(); List writePaths = new ArrayList<>(); + BiConsumer addPath = (path, mode) -> { + var normalized = normalizePath(path); + if (mode == Mode.READ_WRITE) { + writePaths.add(normalized); + } + readPaths.add(normalized); + }; + BiConsumer addPathAndMaybeLink = (path, mode) -> { + addPath.accept(path, mode); + // also try to follow symlinks. Lucene does this and writes to the target path. + if (Files.exists(path)) { + try { + Path realPath = path.toRealPath(); + if (realPath.equals(path) == false) { + addPath.accept(realPath, mode); + } + } catch (IOException e) { + throw new UncheckedIOException(e); + } + } + }; for (FilesEntitlement.FileData fileData : filesEntitlement.filesData()) { var platform = fileData.platform(); if (platform != null && platform.isCurrent() == false) { @@ -38,18 +68,20 @@ private FileAccessTree(FilesEntitlement filesEntitlement, PathLookup pathLookup) var mode = fileData.mode(); var paths = fileData.resolvePaths(pathLookup); paths.forEach(path -> { - var normalized = normalizePath(path); - if (mode == FilesEntitlement.Mode.READ_WRITE) { - writePaths.add(normalized); + if (path == null) { + // TODO: null paths shouldn't be allowed, but they can occur due to repo paths + return; } - readPaths.add(normalized); + addPathAndMaybeLink.accept(path, mode); }); } - // everything has access to the temp dir - String tempDir = normalizePath(pathLookup.tempDir()); - readPaths.add(tempDir); - writePaths.add(tempDir); + // everything has access to the temp dir and the jdk + addPathAndMaybeLink.accept(pathLookup.tempDir(), Mode.READ_WRITE); + + // TODO: watcher uses javax.activation which looks for known mime types configuration, should this be global or explicit in watcher? 
+ Path jdk = Paths.get(System.getProperty("java.home")); + addPathAndMaybeLink.accept(jdk.resolve("conf"), Mode.READ); readPaths.sort(PATH_ORDER); writePaths.sort(PATH_ORDER); diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java index 71ec497b9ec13..98fd98b75719e 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/FileAccessTreeTests.java @@ -10,11 +10,15 @@ package org.elasticsearch.entitlement.runtime.policy; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.entitlement.runtime.policy.entitlements.FilesEntitlement; import org.elasticsearch.test.ESTestCase; import org.junit.BeforeClass; +import java.io.IOException; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.ArrayList; import java.util.HashMap; import java.util.List; @@ -23,6 +27,7 @@ import static org.elasticsearch.core.PathUtils.getDefaultFileSystem; import static org.hamcrest.Matchers.is; +@ESTestCase.WithoutSecurityManager public class FileAccessTreeTests extends ESTestCase { static Path root; @@ -211,6 +216,45 @@ public void testForwardSlashes() { assertThat(tree.canRead(path("m/n")), is(true)); } + public void testJdkAccess() { + Path jdkDir = Paths.get(System.getProperty("java.home")); + var confDir = jdkDir.resolve("conf"); + var tree = accessTree(FilesEntitlement.EMPTY); + + assertThat(tree.canRead(confDir), is(true)); + assertThat(tree.canWrite(confDir), is(false)); + assertThat(tree.canRead(jdkDir), is(false)); + } + + @SuppressForbidden(reason = "don't care about the directory location in tests") + public void testFollowLinks() throws IOException { + Path baseSourceDir = Files.createTempDirectory("fileaccess_source"); + Path source1Dir = baseSourceDir.resolve("source1"); + Files.createDirectory(source1Dir); + Path source2Dir = baseSourceDir.resolve("source2"); + Files.createDirectory(source2Dir); + + Path baseTargetDir = Files.createTempDirectory("fileaccess_target"); + Path readTarget = baseTargetDir.resolve("read_link"); + Path writeTarget = baseTargetDir.resolve("write_link"); + Files.createSymbolicLink(readTarget, source1Dir); + Files.createSymbolicLink(writeTarget, source2Dir); + var tree = accessTree(entitlement(readTarget.toString(), "read", writeTarget.toString(), "read_write")); + + assertThat(tree.canRead(baseSourceDir), is(false)); + assertThat(tree.canRead(baseTargetDir), is(false)); + + assertThat(tree.canRead(readTarget), is(true)); + assertThat(tree.canWrite(readTarget), is(false)); + assertThat(tree.canRead(source1Dir), is(true)); + assertThat(tree.canWrite(source1Dir), is(false)); + + assertThat(tree.canRead(writeTarget), is(true)); + assertThat(tree.canWrite(writeTarget), is(true)); + assertThat(tree.canRead(source2Dir), is(true)); + assertThat(tree.canWrite(source2Dir), is(true)); + } + public void testTempDirAccess() { var tree = FileAccessTree.of(FilesEntitlement.EMPTY, TEST_PATH_LOOKUP); assertThat(tree.canRead(TEST_PATH_LOOKUP.tempDir()), is(true)); From d0db4cd085f830fa26391a267883ba2325a5e518 Mon Sep 17 00:00:00 2001 From: David Turner Date: Tue, 25 Feb 2025 12:50:43 +0000 Subject: [PATCH 10/20] Reduce licence checks in `LicensedWriteLoadForecaster` (#123346) Rather 
than checking the license (updating the usage map) on every single shard, just do it once at the start of a computation that needs to forecast write loads. Closes #123247 --- docs/changelog/123346.yaml | 6 ++ .../TransportGetDesiredBalanceAction.java | 1 + .../rollover/MetadataRolloverService.java | 1 + ...deAllocationStatsAndWeightsCalculator.java | 4 + .../allocation/WriteLoadForecaster.java | 5 ++ .../allocator/BalancedShardsAllocator.java | 5 ++ .../cluster/ESAllocationTestCase.java | 3 + .../WriteLoadForecasterIT.java | 2 + .../LicensedWriteLoadForecaster.java | 49 +++++++++++-- .../LicensedWriteLoadForecasterTests.java | 73 ++++++++++++++++++- 10 files changed, 141 insertions(+), 8 deletions(-) create mode 100644 docs/changelog/123346.yaml diff --git a/docs/changelog/123346.yaml b/docs/changelog/123346.yaml new file mode 100644 index 0000000000000..42c6fbf6931ad --- /dev/null +++ b/docs/changelog/123346.yaml @@ -0,0 +1,6 @@ +pr: 123346 +summary: Reduce license checks in `LicensedWriteLoadForecaster` +area: CRUD +type: bug +issues: + - 123247 diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java index 454fdad7cccc0..d01fd702f79be 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/TransportGetDesiredBalanceAction.java @@ -92,6 +92,7 @@ protected void masterOperation( return; } var clusterInfo = clusterInfoService.getClusterInfo(); + writeLoadForecaster.refreshLicense(); listener.onResponse( new DesiredBalanceResponse( desiredBalanceShardsAllocator.getStats(), diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java index 0c22a17bb1f6b..63a5a792db3c9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/rollover/MetadataRolloverService.java @@ -430,6 +430,7 @@ yield new DataStreamAutoShardingEvent( ); } + writeLoadForecaster.refreshLicense(); metadataBuilder = writeLoadForecaster.withWriteLoadForecastForWriteIndex(dataStreamName, metadataBuilder); metadataBuilder = withShardSizeForecastForWriteIndex(dataStreamName, metadataBuilder); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsAndWeightsCalculator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsAndWeightsCalculator.java index f793ab8f9eb71..d2a13b14bb4b9 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsAndWeightsCalculator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationStatsAndWeightsCalculator.java @@ -70,6 +70,10 @@ public Map nodesAllocationStatsAndWeights( ClusterInfo clusterInfo, @Nullable DesiredBalance desiredBalance ) { + if (metadata.indices().isEmpty() == false) { + // must not use licensed features when just starting up + writeLoadForecaster.refreshLicense(); + } var weightFunction = new WeightFunction(shardBalanceFactor, indexBalanceFactor, writeLoadBalanceFactor, diskUsageBalanceFactor); var avgShardsPerNode = 
WeightFunction.avgShardPerNode(metadata, routingNodes); var avgWriteLoadPerNode = WeightFunction.avgWriteLoadPerNode(writeLoadForecaster, metadata, routingNodes); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/WriteLoadForecaster.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/WriteLoadForecaster.java index e7ca51eee815e..7bebedd9fdde4 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/WriteLoadForecaster.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/WriteLoadForecaster.java @@ -21,6 +21,8 @@ public interface WriteLoadForecaster { OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata); + void refreshLicense(); + class DefaultWriteLoadForecaster implements WriteLoadForecaster { @Override public Metadata.Builder withWriteLoadForecastForWriteIndex(String dataStreamName, Metadata.Builder metadata) { @@ -31,5 +33,8 @@ public Metadata.Builder withWriteLoadForecastForWriteIndex(String dataStreamName public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) { return OptionalDouble.empty(); } + + @Override + public void refreshLicense() {} } } diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 0ce1605234636..a39c4cce6c401 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -145,6 +145,11 @@ public BalancedShardsAllocator(ClusterSettings clusterSettings, WriteLoadForecas @Override public void allocate(RoutingAllocation allocation) { + if (allocation.metadata().indices().isEmpty() == false) { + // must not use licensed features when just starting up + writeLoadForecaster.refreshLicense(); + } + assert allocation.ignoreDisable() == false; if (allocation.routingNodes().size() == 0) { diff --git a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java index 132dd4b119469..1106aa1988845 100644 --- a/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/cluster/ESAllocationTestCase.java @@ -89,6 +89,9 @@ public Metadata.Builder withWriteLoadForecastForWriteIndex(String dataStreamName public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) { return indexMetadata.getForecastedWriteLoad(); } + + @Override + public void refreshLicense() {} }; public static MockAllocationService createAllocationService() { diff --git a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java index b37b026b853e2..5c174d1bddef2 100644 --- a/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java +++ b/x-pack/plugin/write-load-forecaster/src/internalClusterTest/java/org/elasticsearch/xpack/writeloadforecaster/WriteLoadForecasterIT.java @@ -84,6 +84,7 @@ public void testWriteLoadForecastGetsPopulatedDuringRollovers() throws Exception 
assertAllPreviousForecastsAreClearedAfterRollover(dataStream, metadata); setHasValidLicense(false); + writeLoadForecaster.refreshLicense(); final OptionalDouble forecastedWriteLoadAfterLicenseChange = writeLoadForecaster.getForecastedWriteLoad(writeIndexMetadata); assertThat(forecastedWriteLoadAfterLicenseChange.isPresent(), is(equalTo(false))); @@ -131,6 +132,7 @@ public void testWriteLoadForecastIsOverriddenBySetting() throws Exception { assertAllPreviousForecastsAreClearedAfterRollover(dataStream, metadata); setHasValidLicense(false); + writeLoadForecaster.refreshLicense(); final OptionalDouble forecastedWriteLoadAfterLicenseChange = writeLoadForecaster.getForecastedWriteLoad(writeIndexMetadata); assertThat(forecastedWriteLoadAfterLicenseChange.isPresent(), is(equalTo(false))); diff --git a/x-pack/plugin/write-load-forecaster/src/main/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecaster.java b/x-pack/plugin/write-load-forecaster/src/main/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecaster.java index d4a85ce859b2b..45c5abdc61fd6 100644 --- a/x-pack/plugin/write-load-forecaster/src/main/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecaster.java +++ b/x-pack/plugin/write-load-forecaster/src/main/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecaster.java @@ -19,8 +19,12 @@ import org.elasticsearch.core.SuppressForbidden; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; +import org.elasticsearch.logging.LogManager; +import org.elasticsearch.logging.Logger; import org.elasticsearch.threadpool.ThreadPool; +import java.lang.invoke.MethodHandles; +import java.lang.invoke.VarHandle; import java.util.List; import java.util.Objects; import java.util.OptionalDouble; @@ -30,6 +34,9 @@ import static org.elasticsearch.xpack.writeloadforecaster.WriteLoadForecasterPlugin.OVERRIDE_WRITE_LOAD_FORECAST_SETTING; class LicensedWriteLoadForecaster implements WriteLoadForecaster { + + private static final Logger logger = LogManager.getLogger(LicensedWriteLoadForecaster.class); + public static final Setting MAX_INDEX_AGE_SETTING = Setting.timeSetting( "write_load_forecaster.max_index_age", TimeValue.timeValueDays(7), @@ -37,23 +44,26 @@ class LicensedWriteLoadForecaster implements WriteLoadForecaster { Setting.Property.NodeScope, Setting.Property.Dynamic ); - private final BooleanSupplier hasValidLicense; + private final BooleanSupplier hasValidLicenseSupplier; private final ThreadPool threadPool; private volatile TimeValue maxIndexAge; + @SuppressWarnings("unused") // modified via VH_HAS_VALID_LICENSE_FIELD + private volatile boolean hasValidLicense; + LicensedWriteLoadForecaster( - BooleanSupplier hasValidLicense, + BooleanSupplier hasValidLicenseSupplier, ThreadPool threadPool, Settings settings, ClusterSettings clusterSettings ) { - this(hasValidLicense, threadPool, MAX_INDEX_AGE_SETTING.get(settings)); + this(hasValidLicenseSupplier, threadPool, MAX_INDEX_AGE_SETTING.get(settings)); clusterSettings.addSettingsUpdateConsumer(MAX_INDEX_AGE_SETTING, this::setMaxIndexAgeSetting); } // exposed for tests only - LicensedWriteLoadForecaster(BooleanSupplier hasValidLicense, ThreadPool threadPool, TimeValue maxIndexAge) { - this.hasValidLicense = hasValidLicense; + LicensedWriteLoadForecaster(BooleanSupplier hasValidLicenseSupplier, ThreadPool threadPool, TimeValue maxIndexAge) { + this.hasValidLicenseSupplier = hasValidLicenseSupplier; this.threadPool = threadPool; this.maxIndexAge = 
maxIndexAge; } @@ -64,7 +74,7 @@ private void setMaxIndexAgeSetting(TimeValue updatedMaxIndexAge) { @Override public Metadata.Builder withWriteLoadForecastForWriteIndex(String dataStreamName, Metadata.Builder metadata) { - if (hasValidLicense.getAsBoolean() == false) { + if (hasValidLicense == false) { return metadata; } @@ -143,7 +153,7 @@ static OptionalDouble forecastIndexWriteLoad(List indicesWriteLo @Override @SuppressForbidden(reason = "This is the only place where IndexMetadata#getForecastedWriteLoad is allowed to be used") public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) { - if (hasValidLicense.getAsBoolean() == false) { + if (hasValidLicense == false) { return OptionalDouble.empty(); } @@ -154,4 +164,29 @@ public OptionalDouble getForecastedWriteLoad(IndexMetadata indexMetadata) { return indexMetadata.getForecastedWriteLoad(); } + + /** + * Used to atomically {@code getAndSet()} the {@link #hasValidLicense} field. This is better than an + * {@link java.util.concurrent.atomic.AtomicBoolean} because it takes one less pointer dereference on each read. + */ + private static final VarHandle VH_HAS_VALID_LICENSE_FIELD; + + static { + try { + VH_HAS_VALID_LICENSE_FIELD = MethodHandles.lookup() + .in(LicensedWriteLoadForecaster.class) + .findVarHandle(LicensedWriteLoadForecaster.class, "hasValidLicense", boolean.class); + } catch (NoSuchFieldException | IllegalAccessException e) { + throw new RuntimeException(e); + } + } + + @Override + public void refreshLicense() { + final var newValue = hasValidLicenseSupplier.getAsBoolean(); + final var oldValue = (boolean) VH_HAS_VALID_LICENSE_FIELD.getAndSet(this, newValue); + if (newValue != oldValue) { + logger.info("license state changed, now [{}]", newValue ? "valid" : "not valid"); + } + } } diff --git a/x-pack/plugin/write-load-forecaster/src/test/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecasterTests.java b/x-pack/plugin/write-load-forecaster/src/test/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecasterTests.java index 790af0a201578..162e84b2562c5 100644 --- a/x-pack/plugin/write-load-forecaster/src/test/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecasterTests.java +++ b/x-pack/plugin/write-load-forecaster/src/test/java/org/elasticsearch/xpack/writeloadforecaster/LicensedWriteLoadForecasterTests.java @@ -7,6 +7,8 @@ package org.elasticsearch.xpack.writeloadforecaster; +import org.apache.logging.log4j.Level; +import org.apache.logging.log4j.core.LogEvent; import org.elasticsearch.cluster.metadata.DataStream; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.cluster.metadata.IndexMetadataStats; @@ -19,6 +21,7 @@ import org.elasticsearch.index.IndexMode; import org.elasticsearch.index.IndexVersion; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.MockLog; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.junit.After; @@ -30,9 +33,12 @@ import java.util.OptionalDouble; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; +import java.util.concurrent.atomic.AtomicInteger; import static org.elasticsearch.xpack.writeloadforecaster.LicensedWriteLoadForecaster.forecastIndexWriteLoad; import static org.hamcrest.Matchers.closeTo; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static 
org.hamcrest.Matchers.greaterThan; import static org.hamcrest.Matchers.is; @@ -53,7 +59,13 @@ public void tearDownThreadPool() { public void testWriteLoadForecastIsAddedToWriteIndex() { final TimeValue maxIndexAge = TimeValue.timeValueDays(7); final AtomicBoolean hasValidLicense = new AtomicBoolean(true); - final WriteLoadForecaster writeLoadForecaster = new LicensedWriteLoadForecaster(hasValidLicense::get, threadPool, maxIndexAge); + final AtomicInteger licenseCheckCount = new AtomicInteger(); + final WriteLoadForecaster writeLoadForecaster = new LicensedWriteLoadForecaster(() -> { + licenseCheckCount.incrementAndGet(); + return hasValidLicense.get(); + }, threadPool, maxIndexAge); + + writeLoadForecaster.refreshLicense(); final Metadata.Builder metadataBuilder = Metadata.builder(); final String dataStreamName = "logs-es"; @@ -95,8 +107,12 @@ public void testWriteLoadForecastIsAddedToWriteIndex() { assertThat(forecastedWriteLoad.isPresent(), is(true)); assertThat(forecastedWriteLoad.getAsDouble(), is(greaterThan(0.0))); + assertThat(licenseCheckCount.get(), equalTo(1)); hasValidLicense.set(false); + writeLoadForecaster.refreshLicense(); + assertThat(licenseCheckCount.get(), equalTo(2)); + final OptionalDouble forecastedWriteLoadAfterLicenseChange = writeLoadForecaster.getForecastedWriteLoad(writeIndex); assertThat(forecastedWriteLoadAfterLicenseChange.isPresent(), is(false)); } @@ -136,6 +152,7 @@ public void testUptimeIsUsedToWeightWriteLoad() { metadataBuilder.put(dataStream); final WriteLoadForecaster writeLoadForecaster = new LicensedWriteLoadForecaster(() -> true, threadPool, maxIndexAge); + writeLoadForecaster.refreshLicense(); final Metadata.Builder updatedMetadataBuilder = writeLoadForecaster.withWriteLoadForecastForWriteIndex( dataStream.getName(), @@ -154,6 +171,7 @@ public void testForecastedWriteLoadIsOverriddenBySetting() { final TimeValue maxIndexAge = TimeValue.timeValueDays(7); final AtomicBoolean hasValidLicense = new AtomicBoolean(true); final WriteLoadForecaster writeLoadForecaster = new LicensedWriteLoadForecaster(hasValidLicense::get, threadPool, maxIndexAge); + writeLoadForecaster.refreshLicense(); final Metadata.Builder metadataBuilder = Metadata.builder(); final String dataStreamName = "logs-es"; @@ -197,6 +215,7 @@ public void testForecastedWriteLoadIsOverriddenBySetting() { assertThat(forecastedWriteLoad.getAsDouble(), is(equalTo(0.6))); hasValidLicense.set(false); + writeLoadForecaster.refreshLicense(); final OptionalDouble forecastedWriteLoadAfterLicenseChange = writeLoadForecaster.getForecastedWriteLoad(writeIndex); assertThat(forecastedWriteLoadAfterLicenseChange.isPresent(), is(false)); @@ -327,4 +346,56 @@ private DataStream createDataStream(String name, List backingIndices) { .setIndexMode(IndexMode.STANDARD) .build(); } + + public void testLicenseStateLogging() { + + final var seenMessages = new ArrayList(); + + final var collectingLoggingAssertion = new MockLog.SeenEventExpectation( + "seen event", + LicensedWriteLoadForecaster.class.getCanonicalName(), + Level.INFO, + "*" + ) { + @Override + public boolean innerMatch(LogEvent event) { + final var message = event.getMessage().getFormattedMessage(); + if (message.startsWith("license state changed, now [")) { + seenMessages.add(message); + return true; + } + + return false; + } + }; + + MockLog.assertThatLogger(() -> { + final var hasValidLicense = new AtomicBoolean(); + final var writeLoadForecaster = new LicensedWriteLoadForecaster(hasValidLicense::get, threadPool, randomTimeValue()); + 
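+            // nothing is logged yet: refreshLicense() only emits the INFO line when the observed license state changes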
assertThat(seenMessages, empty()); + writeLoadForecaster.refreshLicense(); + assertThat(seenMessages, empty()); + + hasValidLicense.set(true); + writeLoadForecaster.refreshLicense(); + assertThat(seenMessages, contains("license state changed, now [valid]")); + writeLoadForecaster.refreshLicense(); + assertThat(seenMessages, contains("license state changed, now [valid]")); + + hasValidLicense.set(false); + writeLoadForecaster.refreshLicense(); + assertThat(seenMessages, contains("license state changed, now [valid]", "license state changed, now [not valid]")); + + hasValidLicense.set(true); + ESTestCase.startInParallel(between(1, 10), ignored -> writeLoadForecaster.refreshLicense()); + assertThat( + seenMessages, + contains( + "license state changed, now [valid]", + "license state changed, now [not valid]", + "license state changed, now [valid]" + ) + ); + }, LicensedWriteLoadForecaster.class, collectingLoggingAssertion); + } } From 2f0e1dabf7f59f058647f11aca7a5705e8d2aa70 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 25 Feb 2025 13:55:25 +0100 Subject: [PATCH 11/20] Missing log4j policy (#123360) --- .../initialization/EntitlementInitialization.java | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 0fef2c27df663..d3d09e0f84aee 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -222,7 +222,10 @@ private static PolicyManager createPolicyManager() { "org.apache.lucene.misc", List.of(new FilesEntitlement(List.of(FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE)))) ), - new Scope("org.apache.logging.log4j.core", List.of(new ManageThreadsEntitlement())), + new Scope( + "org.apache.logging.log4j.core", + List.of(new ManageThreadsEntitlement(), new FilesEntitlement(List.of(FileData.ofPath(bootstrapArgs.logsDir(), READ_WRITE)))) + ), new Scope( "org.elasticsearch.nativeaccess", List.of( From 6c55099784b9f2953905541d987d94a5104c2ec8 Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Tue, 25 Feb 2025 14:42:41 +0100 Subject: [PATCH 12/20] Store arrays offsets for ip fields natively with synthetic source (#122999) Follow up of #113757 and adds support to natively store array offsets for ip fields instead of falling back to ignored source. 
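To sketch the idea behind the offsets field (same mechanism introduced for keyword in #113757, now reused for ip): SortedSetDocValues only retains the sorted set of unique values per document, so duplicates, nulls and the original element order of a leaf array are lost. The companion `.offsets` doc values field records, for each original array slot, the ordinal of the value that occupied it (or a null marker), which is enough to rebuild the array exactly as it was indexed. Below is a minimal, simplified sketch of that reconstruction, not the actual loader (which lives in SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer and operates on encoded ip bytes); the names sortedUniqueValues, offsetToOrd and NULL_ORD are illustrative only and do not appear in the codebase:

    import java.util.ArrayList;
    import java.util.List;

    // Simplified sketch: rebuild a leaf array from doc values plus per-slot ordinals.
    class OffsetReconstructionSketch {

        static final int NULL_ORD = -1; // sentinel for a null element, an assumption of this sketch

        static List<String> reconstruct(List<String> sortedUniqueValues, int[] offsetToOrd) {
            List<String> original = new ArrayList<>(offsetToOrd.length);
            for (int ord : offsetToOrd) {
                // each slot stores either the ordinal of its value in the sorted unique set, or the null marker
                original.add(ord == NULL_ORD ? null : sortedUniqueValues.get(ord));
            }
            return original;
        }

        public static void main(String[] args) {
            // Indexed as ["192.168.1.4", null, "192.168.1.1", "192.168.1.4"]: doc values keep only the
            // sorted unique values, the offsets field keeps one ordinal per original array slot.
            List<String> docValues = List.of("192.168.1.1", "192.168.1.4");
            int[] offsetToOrd = { 1, NULL_ORD, 0, 1 };
            System.out.println(reconstruct(docValues, offsetToOrd));
            // prints [192.168.1.4, null, 192.168.1.1, 192.168.1.4]
        }
    }

For ip fields the doc values hold the encoded InetAddress bytes, so the real loader additionally applies IpFieldMapper::convert before writing each value back as a formatted address string.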
--- docs/changelog/122999.yaml | 5 + .../elasticsearch/index/IndexVersions.java | 1 + .../index/mapper/FieldArrayContext.java | 47 ++- .../index/mapper/IpFieldMapper.java | 86 +++-- .../index/mapper/KeywordFieldMapper.java | 41 +-- ...etsDocValuesSyntheticFieldLoaderLayer.java | 32 +- ...eticSourceNativeArrayIntegrationTests.java | 91 ++++++ .../index/mapper/IpFieldMapperTests.java | 5 + .../index/mapper/IpFieldTypeTests.java | 20 +- .../mapper/IpOffsetDocValuesLoaderTests.java | 41 +++ .../KeywordOffsetDocValuesLoaderTests.java | 215 +------------ ...eticSourceNativeArrayIntegrationTests.java | 283 +---------------- .../NativeArrayIntegrationTestCase.java | 299 ++++++++++++++++++ .../mapper/OffsetDocValuesLoaderTestCase.java | 230 ++++++++++++++ 14 files changed, 859 insertions(+), 537 deletions(-) create mode 100644 docs/changelog/122999.yaml create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/IPSyntheticSourceNativeArrayIntegrationTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/IpOffsetDocValuesLoaderTests.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java create mode 100644 server/src/test/java/org/elasticsearch/index/mapper/OffsetDocValuesLoaderTestCase.java diff --git a/docs/changelog/122999.yaml b/docs/changelog/122999.yaml new file mode 100644 index 0000000000000..a0134afc59a05 --- /dev/null +++ b/docs/changelog/122999.yaml @@ -0,0 +1,5 @@ +pr: 122999 +summary: Store arrays offsets for ip fields natively with synthetic source +area: Mapping +type: enhancement +issues: [] diff --git a/server/src/main/java/org/elasticsearch/index/IndexVersions.java b/server/src/main/java/org/elasticsearch/index/IndexVersions.java index 2d1fe31337398..69c9d050d592d 100644 --- a/server/src/main/java/org/elasticsearch/index/IndexVersions.java +++ b/server/src/main/java/org/elasticsearch/index/IndexVersions.java @@ -150,6 +150,7 @@ private static Version parseUnchecked(String version) { public static final IndexVersion TIMESTAMP_DOC_VALUES_SPARSE_INDEX = def(9_011_0_00, Version.LUCENE_10_1_0); public static final IndexVersion TIME_SERIES_ID_DOC_VALUES_SPARSE_INDEX = def(9_012_0_00, Version.LUCENE_10_1_0); public static final IndexVersion SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD = def(9_013_0_00, Version.LUCENE_10_1_0); + public static final IndexVersion SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_IP = def(9_014_0_00, Version.LUCENE_10_1_0); /* * STOP! READ THIS FIRST! 
No, really, * ____ _____ ___ ____ _ ____ _____ _ ____ _____ _ _ ___ ____ _____ ___ ____ ____ _____ _ diff --git a/server/src/main/java/org/elasticsearch/index/mapper/FieldArrayContext.java b/server/src/main/java/org/elasticsearch/index/mapper/FieldArrayContext.java index 523ac19524ee2..ca293be5f511a 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/FieldArrayContext.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/FieldArrayContext.java @@ -13,6 +13,8 @@ import org.apache.lucene.util.BitUtil; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.index.IndexVersion; +import org.elasticsearch.index.IndexVersions; import java.io.IOException; import java.util.ArrayList; @@ -23,9 +25,10 @@ public class FieldArrayContext { + private static final String OFFSETS_FIELD_NAME_SUFFIX = ".offsets"; private final Map offsetsPerField = new HashMap<>(); - void recordOffset(String field, String value) { + void recordOffset(String field, Comparable value) { Offsets arrayOffsets = offsetsPerField.computeIfAbsent(field, k -> new Offsets()); int nextOffset = arrayOffsets.currentOffset++; var offsets = arrayOffsets.valueToOffsets.computeIfAbsent(value, s -> new ArrayList<>(2)); @@ -79,13 +82,53 @@ static int[] parseOffsetArray(StreamInput in) throws IOException { return offsetToOrd; } + static String getOffsetsFieldName( + MapperBuilderContext context, + Mapper.SourceKeepMode indexSourceKeepMode, + boolean hasDocValues, + boolean isStored, + FieldMapper.Builder fieldMapperBuilder, + IndexVersion indexCreatedVersion, + IndexVersion minSupportedVersionMain + ) { + var sourceKeepMode = fieldMapperBuilder.sourceKeepMode.orElse(indexSourceKeepMode); + if (context.isSourceSynthetic() + && sourceKeepMode == Mapper.SourceKeepMode.ARRAYS + && hasDocValues + && isStored == false + && fieldMapperBuilder.copyTo.copyToFields().isEmpty() + && fieldMapperBuilder.multiFieldsBuilder.hasMultiFields() == false + && indexVersionSupportStoringArraysNatively(indexCreatedVersion, minSupportedVersionMain)) { + // Skip stored, we will be synthesizing from stored fields, no point to keep track of the offsets + // Skip copy_to and multi fields, supporting that requires more work. However, copy_to usage is rare in metrics and + // logging use cases + + // keep track of value offsets so that we can reconstruct arrays from doc values in order as was specified during indexing + // (if field is stored then there is no point of doing this) + return context.buildFullName(fieldMapperBuilder.leafName() + FieldArrayContext.OFFSETS_FIELD_NAME_SUFFIX); + } else { + return null; + } + } + + private static boolean indexVersionSupportStoringArraysNatively( + IndexVersion indexCreatedVersion, + IndexVersion minSupportedVersionMain + ) { + return indexCreatedVersion.onOrAfter(minSupportedVersionMain) + || indexCreatedVersion.between( + IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD_BACKPORT_8_X, + IndexVersions.UPGRADE_TO_LUCENE_10_0_0 + ); + } + private static class Offsets { int currentOffset; // Need to use TreeMap here, so that we maintain the order in which each value (with offset) stored inserted, // (which is in the same order the document gets parsed) so we store offsets in right order. This is the same // order in what the values get stored in SortedSetDocValues. 
- final Map> valueToOffsets = new TreeMap<>(); + final Map, List> valueToOffsets = new TreeMap<>(); final List nullValueOffsets = new ArrayList<>(2); } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java index 3ded3d2699b21..8e6224e5720cb 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/IpFieldMapper.java @@ -55,6 +55,7 @@ import java.util.Objects; import java.util.function.BiFunction; +import static org.elasticsearch.index.mapper.FieldArrayContext.getOffsetsFieldName; import static org.elasticsearch.index.mapper.IpPrefixAutomatonUtil.buildIpPrefixAutomaton; /** @@ -92,8 +93,15 @@ public static final class Builder extends FieldMapper.DimensionBuilder { private final boolean ignoreMalformedByDefault; private final IndexVersion indexCreatedVersion; private final ScriptCompiler scriptCompiler; + private final SourceKeepMode indexSourceKeepMode; - public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalformedByDefault, IndexVersion indexCreatedVersion) { + public Builder( + String name, + ScriptCompiler scriptCompiler, + boolean ignoreMalformedByDefault, + IndexVersion indexCreatedVersion, + SourceKeepMode indexSourceKeepMode + ) { super(name); this.scriptCompiler = Objects.requireNonNull(scriptCompiler); this.ignoreMalformedByDefault = ignoreMalformedByDefault; @@ -114,6 +122,7 @@ public Builder(String name, ScriptCompiler scriptCompiler, boolean ignoreMalform ); } }); + this.indexSourceKeepMode = indexSourceKeepMode; } Builder nullValue(String nullValue) { @@ -184,6 +193,16 @@ public IpFieldMapper build(MapperBuilderContext context) { } hasScript = script.get() != null; onScriptError = onScriptErrorParam.getValue(); + + String offsetsFieldName = getOffsetsFieldName( + context, + indexSourceKeepMode, + hasDocValues.getValue(), + stored.getValue(), + this, + indexCreatedVersion, + IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_IP + ); return new IpFieldMapper( leafName(), new IpFieldType( @@ -198,7 +217,8 @@ public IpFieldMapper build(MapperBuilderContext context) { ), builderParams(this, context), context.isSourceSynthetic(), - this + this, + offsetsFieldName ); } @@ -206,7 +226,7 @@ public IpFieldMapper build(MapperBuilderContext context) { public static final TypeParser PARSER = createTypeParserWithLegacySupport((n, c) -> { boolean ignoreMalformedByDefault = IGNORE_MALFORMED_SETTING.get(c.getSettings()); - return new Builder(n, c.scriptCompiler(), ignoreMalformedByDefault, c.indexVersionCreated()); + return new Builder(n, c.scriptCompiler(), ignoreMalformedByDefault, c.indexVersionCreated(), c.getIndexSettings().sourceKeepMode()); }); public static final class IpFieldType extends SimpleMappedFieldType { @@ -501,13 +521,16 @@ public TermsEnum getTerms(IndexReader reader, String prefix, boolean caseInsensi private final Script script; private final FieldValues scriptValues; private final ScriptCompiler scriptCompiler; + private final SourceKeepMode indexSourceKeepMode; + private final String offsetsFieldName; private IpFieldMapper( String simpleName, MappedFieldType mappedFieldType, BuilderParams builderParams, boolean storeIgnored, - Builder builder + Builder builder, + String offsetsFieldName ) { super(simpleName, mappedFieldType, builderParams); this.ignoreMalformedByDefault = builder.ignoreMalformedByDefault; @@ -523,6 +546,8 @@ private IpFieldMapper( 
this.scriptCompiler = builder.scriptCompiler; this.dimension = builder.dimension.getValue(); this.storeIgnored = storeIgnored; + this.indexSourceKeepMode = builder.indexSourceKeepMode; + this.offsetsFieldName = offsetsFieldName; } @Override @@ -561,6 +586,14 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio if (address != null) { indexValue(context, address); } + if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.canAddIgnoredField()) { + if (address != null) { + BytesRef sortableValue = new BytesRef(InetAddressPoint.encode(address)); + context.getOffSetContext().recordOffset(offsetsFieldName, sortableValue); + } else { + context.getOffSetContext().recordNull(offsetsFieldName); + } + } } private void indexValue(DocumentParserContext context, InetAddress address) { @@ -593,7 +626,9 @@ protected void indexScriptValues( @Override public FieldMapper.Builder getMergeBuilder() { - return new Builder(leafName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion).dimension(dimension).init(this); + return new Builder(leafName(), scriptCompiler, ignoreMalformedByDefault, indexCreatedVersion, indexSourceKeepMode).dimension( + dimension + ).init(this); } @Override @@ -610,19 +645,24 @@ protected SyntheticSourceSupport syntheticSourceSupport() { if (hasDocValues) { return new SyntheticSourceSupport.Native(() -> { var layers = new ArrayList(); - layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { - @Override - protected BytesRef convert(BytesRef value) { - byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length); - return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes))); - } - - @Override - protected BytesRef preserve(BytesRef value) { - // No need to copy because convert has made a deep copy - return value; - } - }); + if (offsetsFieldName != null) { + layers.add( + new SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(fullPath(), offsetsFieldName, IpFieldMapper::convert) + ); + } else { + layers.add(new SortedSetDocValuesSyntheticFieldLoaderLayer(fullPath()) { + @Override + protected BytesRef convert(BytesRef value) { + return IpFieldMapper.convert(value); + } + + @Override + protected BytesRef preserve(BytesRef value) { + // No need to copy because convert has made a deep copy + return value; + } + }); + } if (ignoreMalformed) { layers.add(new CompositeSyntheticFieldLoader.MalformedValuesLayer(fullPath())); @@ -633,4 +673,14 @@ protected BytesRef preserve(BytesRef value) { return super.syntheticSourceSupport(); } + + static BytesRef convert(BytesRef value) { + byte[] bytes = Arrays.copyOfRange(value.bytes, value.offset, value.offset + value.length); + return new BytesRef(NetworkAddress.format(InetAddressPoint.decode(bytes))); + } + + @Override + public String getOffsetFieldName() { + return offsetsFieldName; + } } diff --git a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java index 195ec5a27a72c..b7e8e13e16839 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/KeywordFieldMapper.java @@ -85,6 +85,7 @@ import static org.elasticsearch.core.Strings.format; import static org.elasticsearch.index.IndexSettings.IGNORE_ABOVE_SETTING; import static org.elasticsearch.index.IndexSettings.USE_DOC_VALUES_SKIPPER; +import static 
org.elasticsearch.index.mapper.FieldArrayContext.getOffsetsFieldName; /** * A field mapper for keywords. This mapper accepts strings and indexes them as-is. @@ -95,7 +96,6 @@ public final class KeywordFieldMapper extends FieldMapper { public static final String CONTENT_TYPE = "keyword"; private static final String HOST_NAME = "host.name"; - public static final String OFFSETS_FIELD_NAME_SUFFIX = ".offsets"; public static class Defaults { public static final FieldType FIELD_TYPE; @@ -439,26 +439,15 @@ public KeywordFieldMapper build(MapperBuilderContext context) { super.hasScript = script.get() != null; super.onScriptError = onScriptError.getValue(); - var sourceKeepMode = this.sourceKeepMode.orElse(indexSourceKeepMode); - String offsetsFieldName; - if (context.isSourceSynthetic() - && sourceKeepMode == SourceKeepMode.ARRAYS - && hasDocValues() - && fieldtype.stored() == false - && copyTo.copyToFields().isEmpty() - && multiFieldsBuilder.hasMultiFields() == false - && indexVersionSupportStoringArraysNatively()) { - // Skip stored, we will be synthesizing from stored fields, no point to keep track of the offsets - // Skip copy_to and multi fields, supporting that requires more work. However, copy_to usage is rare in metrics and - // logging use cases - - // keep track of value offsets so that we can reconstruct arrays from doc values in order as was specified during indexing - // (if field is stored then there is no point of doing this) - offsetsFieldName = context.buildFullName(leafName() + OFFSETS_FIELD_NAME_SUFFIX); - } else { - offsetsFieldName = null; - } - + String offsetsFieldName = getOffsetsFieldName( + context, + indexSourceKeepMode, + hasDocValues.getValue(), + stored.getValue(), + this, + indexCreatedVersion, + IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD + ); return new KeywordFieldMapper( leafName(), fieldtype, @@ -472,14 +461,6 @@ && indexVersionSupportStoringArraysNatively()) { ); } - private boolean indexVersionSupportStoringArraysNatively() { - return indexCreatedVersion.onOrAfter(IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD) - || indexCreatedVersion.between( - IndexVersions.SYNTHETIC_SOURCE_STORE_ARRAYS_NATIVELY_KEYWORD_BACKPORT_8_X, - IndexVersions.UPGRADE_TO_LUCENE_10_0_0 - ); - } - private FieldType resolveFieldType( final boolean useDocValuesSkipper, final IndexVersion indexCreatedVersion, @@ -1127,7 +1108,7 @@ protected void parseCreateField(DocumentParserContext context) throws IOExceptio } boolean indexed = indexValue(context, value); - if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.getRecordedSource() == false) { + if (offsetsFieldName != null && context.isImmediateParentAnArray() && context.canAddIgnoredField()) { if (indexed) { context.getOffSetContext().recordOffset(offsetsFieldName, value); } else if (value == null) { diff --git a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.java b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.java index 09a63eb6ab4a7..e04cdd06280d6 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.java @@ -20,6 +20,7 @@ import java.io.IOException; import java.util.Objects; +import java.util.function.Function; /** * Load {@code _source} fields from {@link SortedSetDocValues} and 
associated {@link BinaryDocValues}. The former contains the unique values @@ -30,11 +31,29 @@ final class SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer implements Co private final String name; private final String offsetsFieldName; + private final Function converter; private DocValuesWithOffsetsLoader docValues; + /** + * @param name The name of the field to synthesize + * @param offsetsFieldName The related offset field used to correctly synthesize the field if it is a leaf array + */ SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(String name, String offsetsFieldName) { + this(name, offsetsFieldName, Function.identity()); + } + + /** + * @param name The name of the field to synthesize + * @param offsetsFieldName The related offset field used to correctly synthesize the field if it is a leaf array + * @param converter This field value loader layer synthesizes the values read from doc values as utf8 string. If the doc value + * values aren't serializable as utf8 string then it is the responsibility of the converter to covert into a + * format that can be serialized as utf8 string. For example IP field mapper doc values can't directly be + * serialized as utf8 string. + */ + SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer(String name, String offsetsFieldName, Function converter) { this.name = Objects.requireNonNull(name); this.offsetsFieldName = Objects.requireNonNull(offsetsFieldName); + this.converter = Objects.requireNonNull(converter); } @Override @@ -47,7 +66,7 @@ public DocValuesLoader docValuesLoader(LeafReader leafReader, int[] docIdsInLeaf SortedSetDocValues valueDocValues = DocValues.getSortedSet(leafReader, name); SortedDocValues offsetDocValues = DocValues.getSorted(leafReader, offsetsFieldName); - return docValues = new DocValuesWithOffsetsLoader(valueDocValues, offsetDocValues); + return docValues = new DocValuesWithOffsetsLoader(valueDocValues, offsetDocValues, converter); } @Override @@ -78,15 +97,21 @@ public void write(XContentBuilder b) throws IOException { static final class DocValuesWithOffsetsLoader implements DocValuesLoader { private final SortedDocValues offsetDocValues; private final SortedSetDocValues valueDocValues; + private final Function converter; private final ByteArrayStreamInput scratch = new ByteArrayStreamInput(); private boolean hasValue; private boolean hasOffset; private int[] offsetToOrd; - DocValuesWithOffsetsLoader(SortedSetDocValues valueDocValues, SortedDocValues offsetDocValues) { + DocValuesWithOffsetsLoader( + SortedSetDocValues valueDocValues, + SortedDocValues offsetDocValues, + Function converter + ) { this.valueDocValues = valueDocValues; this.offsetDocValues = offsetDocValues; + this.converter = converter; } @Override @@ -146,7 +171,7 @@ public void write(XContentBuilder b) throws IOException { long ord = ords[offset]; BytesRef c = valueDocValues.lookupOrd(ord); - // This is keyword specific and needs to be updated once support is added for other field types: + c = converter.apply(c); b.utf8Value(c.bytes, c.offset, c.length); } } else if (offsetToOrd != null) { @@ -158,6 +183,7 @@ public void write(XContentBuilder b) throws IOException { } else { for (int i = 0; i < valueDocValues.docValueCount(); i++) { BytesRef c = valueDocValues.lookupOrd(valueDocValues.nextOrd()); + c = converter.apply(c); b.utf8Value(c.bytes, c.offset, c.length); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IPSyntheticSourceNativeArrayIntegrationTests.java 
b/server/src/test/java/org/elasticsearch/index/mapper/IPSyntheticSourceNativeArrayIntegrationTests.java new file mode 100644 index 0000000000000..2ad08ebb10aae --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/IPSyntheticSourceNativeArrayIntegrationTests.java @@ -0,0 +1,91 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.network.NetworkAddress; + +import java.util.ArrayList; +import java.util.List; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; + +public class IPSyntheticSourceNativeArrayIntegrationTests extends NativeArrayIntegrationTestCase { + + @Override + protected String getFieldTypeName() { + return "ip"; + } + + @Override + protected String getRandomValue() { + return NetworkAddress.format(randomIp(true)); + } + + public void testSynthesizeArray() throws Exception { + var arrayValues = new Object[][] { + new Object[] { "192.168.1.4", "192.168.1.3", null, "192.168.1.2", null, "192.168.1.1" }, + new Object[] { null, "192.168.1.2", null, "192.168.1.1" }, + new Object[] { null }, + new Object[] { null, null, null }, + new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" } }; + verifySyntheticArray(arrayValues); + } + + public void testSynthesizeArrayIgnoreMalformed() throws Exception { + var mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("field") + .field("type", "ip") + .field("ignore_malformed", true) + .endObject() + .endObject() + .endObject(); + // Note values that would be ignored are added at the end of arrays, + // this makes testing easier as ignored values are always synthesized after regular values: + var arrayValues = new Object[][] { + new Object[] { null, "192.168.1.1", "192.168.1.2", "192.168.1.3", "192.168.1.4", null, "malformed" }, + new Object[] { "192.168.1.1", "192.168.1.2", "malformed" }, + new Object[] { "192.168.1.1", "192.168.1.1", "malformed" }, + new Object[] { null, null, null, "malformed" }, + new Object[] { "192.168.1.3", "192.168.1.3", "192.168.1.1", "malformed" } }; + verifySyntheticArray(arrayValues, mapping, "_id", "field._ignore_malformed"); + } + + public void testSynthesizeObjectArray() throws Exception { + List> documents = new ArrayList<>(); + { + List document = new ArrayList<>(); + document.add(new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" }); + document.add(new Object[] { "192.168.1.110", "192.168.1.109", "192.168.1.111" }); + document.add(new Object[] { "192.168.1.2", "192.168.1.2", "192.168.1.1" }); + documents.add(document); + } + { + List document = new ArrayList<>(); + document.add(new Object[] { "192.168.1.9", "192.168.1.7", "192.168.1.5" }); + document.add(new Object[] { "192.168.1.2", "192.168.1.4", "192.168.1.6" }); + document.add(new Object[] { "192.168.1.7", "192.168.1.6", "192.168.1.5" }); + documents.add(document); + } + verifySyntheticObjectArray(documents); + } + + public void testSynthesizeArrayInObjectField() throws Exception { + List documents = new ArrayList<>(); + documents.add(new Object[] { "192.168.1.254", "192.168.1.253", 
"192.168.1.252" }); + documents.add(new Object[] { "192.168.1.112", "192.168.1.113", "192.168.1.114" }); + documents.add(new Object[] { "192.168.1.3", "192.168.1.2", "192.168.1.1" }); + documents.add(new Object[] { "192.168.1.9", "192.168.1.7", "192.168.1.5" }); + documents.add(new Object[] { "192.168.1.2", "192.168.1.4", "192.168.1.6" }); + documents.add(new Object[] { "192.168.1.7", "192.168.1.6", "192.168.1.5" }); + verifySyntheticArrayInObject(documents); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java index 1b8a2d68cd930..17ecc2c22db28 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldMapperTests.java @@ -439,4 +439,9 @@ public void execute() { protected Function loadBlockExpected() { return v -> InetAddresses.toAddrString(InetAddressPoint.decode(BytesRef.deepCopyOf((BytesRef) v).bytes)); } + + @Override + protected String randomSyntheticSourceKeep() { + return "all"; + } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java index b3064810e5ca3..7dec761691ce6 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpFieldTypeTests.java @@ -349,16 +349,24 @@ public void testRangeQuery() { } public void testFetchSourceValue() throws IOException { - MappedFieldType mapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).build( - MapperBuilderContext.root(false, false) - ).fieldType(); + MappedFieldType mapper = new IpFieldMapper.Builder( + "field", + ScriptCompiler.NONE, + true, + IndexVersion.current(), + Mapper.SourceKeepMode.NONE + ).build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8::2:1")); assertEquals(List.of("2001:db8::2:1"), fetchSourceValue(mapper, "2001:db8:0:0:0:0:2:1")); assertEquals(List.of("::1"), fetchSourceValue(mapper, "0:0:0:0:0:0:0:1")); - MappedFieldType nullValueMapper = new IpFieldMapper.Builder("field", ScriptCompiler.NONE, true, IndexVersion.current()).nullValue( - "2001:db8:0:0:0:0:2:7" - ).build(MapperBuilderContext.root(false, false)).fieldType(); + MappedFieldType nullValueMapper = new IpFieldMapper.Builder( + "field", + ScriptCompiler.NONE, + true, + IndexVersion.current(), + Mapper.SourceKeepMode.NONE + ).nullValue("2001:db8:0:0:0:0:2:7").build(MapperBuilderContext.root(false, false)).fieldType(); assertEquals(List.of("2001:db8::2:7"), fetchSourceValue(nullValueMapper, null)); } } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/IpOffsetDocValuesLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/IpOffsetDocValuesLoaderTests.java new file mode 100644 index 0000000000000..dadfd22199aec --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/IpOffsetDocValuesLoaderTests.java @@ -0,0 +1,41 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. 
Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.elasticsearch.common.network.NetworkAddress; + +public class IpOffsetDocValuesLoaderTests extends OffsetDocValuesLoaderTestCase { + + public void testOffsetArray() throws Exception { + verifyOffsets("{\"field\":[\"192.168.1.1\",\"192.168.1.3\",\"192.168.1.2\",\"192.168.1.1\",\"192.168.1.9\",\"192.168.1.3\"]}"); + verifyOffsets("{\"field\":[\"192.168.1.4\",null,\"192.168.1.3\",\"192.168.1.2\",null,\"192.168.1.1\"]}"); + } + + public void testOffsetNestedArray() throws Exception { + verifyOffsets( + "{\"field\":[\"192.168.1.2\",[\"192.168.1.1\"],[\"192.168.1.0\"],null,\"192.168.1.0\"]}", + "{\"field\":[\"192.168.1.2\",\"192.168.1.1\",\"192.168.1.0\",null,\"192.168.1.0\"]}" + ); + verifyOffsets( + "{\"field\":[\"192.168.1.6\",[\"192.168.1.5\", [\"192.168.1.4\"]],[\"192.168.1.3\", [\"192.168.1.2\"]],null,\"192.168.1.1\"]}", + "{\"field\":[\"192.168.1.6\",\"192.168.1.5\",\"192.168.1.4\",\"192.168.1.3\",\"192.168.1.2\",null,\"192.168.1.1\"]}" + ); + } + + @Override + protected String getFieldTypeName() { + return "ip"; + } + + @Override + protected String randomValue() { + return NetworkAddress.format(randomIp(true)); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordOffsetDocValuesLoaderTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordOffsetDocValuesLoaderTests.java index 8300e8e8e4614..55e935e11996c 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordOffsetDocValuesLoaderTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordOffsetDocValuesLoaderTests.java @@ -9,145 +9,7 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.index.DirectoryReader; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.mapper.SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer.DocValuesWithOffsetsLoader; -import org.elasticsearch.xcontent.XContentBuilder; -import org.elasticsearch.xcontent.XContentType; - -import java.io.IOException; - -import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.nullValue; - -public class KeywordOffsetDocValuesLoaderTests extends MapperServiceTestCase { - - @Override - protected Settings getIndexSettings() { - return Settings.builder() - .put("index.mapping.source.mode", "synthetic") - .put("index.mapping.synthetic_source_keep", "arrays") - .build(); - } - - public void testOffsetArrayNoDocValues() throws Exception { - String mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword", - "doc_values": false - } - } - } - } - """; - try (var mapperService = createMapperService(mapping)) { - var fieldMapper = mapperService.mappingLookup().getMapper("field"); - assertThat(fieldMapper.getOffsetFieldName(), nullValue()); - } - } - - public void testOffsetArrayStored() throws Exception { - String mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword", - "store": true - } - } - } - } - """; - try (var mapperService = createMapperService(mapping)) { - var fieldMapper = 
mapperService.mappingLookup().getMapper("field"); - assertThat(fieldMapper.getOffsetFieldName(), nullValue()); - } - } - - public void testOffsetMultiFields() throws Exception { - String mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword", - "fields": { - "sub": { - "type": "text" - } - } - } - } - } - } - """; - try (var mapperService = createMapperService(mapping)) { - var fieldMapper = mapperService.mappingLookup().getMapper("field"); - assertThat(fieldMapper.getOffsetFieldName(), nullValue()); - } - } - - public void testOffsetArrayNoSyntheticSource() throws Exception { - String mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword" - } - } - } - } - """; - try (var mapperService = createMapperService(Settings.EMPTY, mapping)) { - var fieldMapper = mapperService.mappingLookup().getMapper("field"); - assertThat(fieldMapper.getOffsetFieldName(), nullValue()); - } - } - - public void testOffsetArrayNoSourceArrayKeep() throws Exception { - var settingsBuilder = Settings.builder().put("index.mapping.source.mode", "synthetic"); - String mapping; - if (randomBoolean()) { - mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword", - "synthetic_source_keep": "{{synthetic_source_keep}}" - } - } - } - } - """.replace("{{synthetic_source_keep}}", randomBoolean() ? "none" : "all"); - } else { - mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword" - } - } - } - } - """; - if (randomBoolean()) { - settingsBuilder.put("index.mapping.synthetic_source_keep", "none"); - } - } - try (var mapperService = createMapperService(settingsBuilder.build(), mapping)) { - var fieldMapper = mapperService.mappingLookup().getMapper("field"); - assertThat(fieldMapper.getOffsetFieldName(), nullValue()); - } - } +public class KeywordOffsetDocValuesLoaderTests extends OffsetDocValuesLoaderTestCase { public void testOffsetArray() throws Exception { verifyOffsets("{\"field\":[\"z\",\"x\",\"y\",\"c\",\"b\",\"a\"]}"); @@ -162,76 +24,13 @@ public void testOffsetNestedArray() throws Exception { ); } - public void testOffsetEmptyArray() throws Exception { - verifyOffsets("{\"field\":[]}"); - } - - public void testOffsetArrayWithNulls() throws Exception { - verifyOffsets("{\"field\":[null,null,null]}"); - } - - public void testOffsetArrayRandom() throws Exception { - StringBuilder values = new StringBuilder(); - int numValues = randomIntBetween(0, 256); - for (int i = 0; i < numValues; i++) { - if (randomInt(10) == 1) { - values.append("null"); - } else { - values.append('"').append(randomAlphanumericOfLength(2)).append('"'); - } - if (i != (numValues - 1)) { - values.append(','); - } - } - verifyOffsets("{\"field\":[" + values + "]}"); - } - - private void verifyOffsets(String source) throws IOException { - verifyOffsets(source, source); - } - - private void verifyOffsets(String source, String expectedSource) throws IOException { - String mapping = """ - { - "_doc": { - "properties": { - "field": { - "type": "keyword" - } - } - } - } - """; - verifyOffsets(mapping, source, expectedSource); + @Override + protected String getFieldTypeName() { + return "keyword"; } - private void verifyOffsets(String mapping, String source, String expectedSource) throws IOException { - try (var mapperService = createMapperService(mapping)) { - var mapper = mapperService.documentMapper(); - - try (var directory = newDirectory()) { - var iw = indexWriterForSyntheticSource(directory); - var doc = mapper.parse(new SourceToParse("_id", new 
BytesArray(source), XContentType.JSON)); - doc.updateSeqID(0, 0); - doc.version().setLongValue(0); - iw.addDocuments(doc.docs()); - iw.close(); - try (var indexReader = wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { - var layer = new SortedSetWithOffsetsDocValuesSyntheticFieldLoaderLayer("field", "field.offsets"); - var leafReader = indexReader.leaves().getFirst().reader(); - var loader = (DocValuesWithOffsetsLoader) layer.docValuesLoader(leafReader, new int[] { 0 }); - assertTrue(loader.advanceToDoc(0)); - assertTrue(loader.count() > 0); - XContentBuilder builder = jsonBuilder().startObject(); - builder.startArray("field"); - loader.write(builder); - builder.endArray().endObject(); - - var actual = Strings.toString(builder); - assertEquals(expectedSource, actual); - } - } - } + @Override + protected String randomValue() { + return randomAlphanumericOfLength(2); } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/KeywordSyntheticSourceNativeArrayIntegrationTests.java b/server/src/test/java/org/elasticsearch/index/mapper/KeywordSyntheticSourceNativeArrayIntegrationTests.java index f0853c34b2097..6f59f617ba259 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/KeywordSyntheticSourceNativeArrayIntegrationTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/KeywordSyntheticSourceNativeArrayIntegrationTests.java @@ -9,38 +9,24 @@ package org.elasticsearch.index.mapper; -import org.apache.lucene.index.DocValuesType; -import org.apache.lucene.index.FieldInfo; -import org.apache.lucene.index.FieldInfos; -import org.apache.lucene.index.IndexableField; -import org.apache.lucene.index.LeafReader; -import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; -import org.elasticsearch.action.index.IndexRequest; -import org.elasticsearch.action.search.SearchRequest; -import org.elasticsearch.action.support.WriteRequest; -import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.query.IdsQueryBuilder; -import org.elasticsearch.test.ESSingleNodeTestCase; -import org.elasticsearch.xcontent.XContentBuilder; -import org.hamcrest.Matchers; +import com.carrotsearch.randomizedtesting.generators.RandomStrings; -import java.io.IOException; import java.util.ArrayList; -import java.util.LinkedHashSet; import java.util.List; -import java.util.Map; -import java.util.Set; import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; -import static org.hamcrest.Matchers.contains; -import static org.hamcrest.Matchers.containsInAnyOrder; -import static org.hamcrest.Matchers.empty; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.hasKey; -import static org.hamcrest.Matchers.nullValue; -public class KeywordSyntheticSourceNativeArrayIntegrationTests extends ESSingleNodeTestCase { +public class KeywordSyntheticSourceNativeArrayIntegrationTests extends NativeArrayIntegrationTestCase { + + @Override + protected String getFieldTypeName() { + return "keyword"; + } + + @Override + protected String getRandomValue() { + return RandomStrings.randomAsciiOfLength(random(), 8); + } public void testSynthesizeArray() throws Exception { var arrayValues = new Object[][] { @@ -52,16 +38,6 @@ public void testSynthesizeArray() throws Exception { verifySyntheticArray(arrayValues); } - public void testSynthesizeEmptyArray() throws Exception { - var arrayValues = new Object[][] { new Object[] {} }; - verifySyntheticArray(arrayValues); - } - - public void 
testSynthesizeArrayRandom() throws Exception { - var arrayValues = new Object[][] { generateRandomStringArray(64, 8, false, true) }; - verifySyntheticArray(arrayValues); - } - public void testSynthesizeArrayIgnoreAbove() throws Exception { var mapping = jsonBuilder().startObject() .startObject("properties") @@ -79,7 +55,7 @@ public void testSynthesizeArrayIgnoreAbove() throws Exception { new Object[] { "123", "1234", "12345" }, new Object[] { null, null, null, "blabla" }, new Object[] { "1", "2", "3", "blabla" } }; - verifySyntheticArray(arrayValues, mapping, 4, "_id", "field._original"); + verifySyntheticArray(arrayValues, mapping, "_id", "field._original"); } public void testSynthesizeObjectArray() throws Exception { @@ -112,237 +88,4 @@ public void testSynthesizeArrayInObjectField() throws Exception { verifySyntheticArrayInObject(documents); } - public void testSynthesizeArrayInObjectFieldRandom() throws Exception { - List documents = new ArrayList<>(); - int numDocs = randomIntBetween(8, 256); - for (int i = 0; i < numDocs; i++) { - documents.add(generateRandomStringArray(64, 8, false, true)); - } - verifySyntheticArrayInObject(documents); - } - - private void verifySyntheticArray(Object[][] arrays) throws IOException { - var mapping = jsonBuilder().startObject() - .startObject("properties") - .startObject("field") - .field("type", "keyword") - .endObject() - .endObject() - .endObject(); - verifySyntheticArray(arrays, mapping, null, "_id"); - } - - private void verifySyntheticArray(Object[][] arrays, XContentBuilder mapping, Integer ignoreAbove, String... expectedStoredFields) - throws IOException { - var indexService = createIndex( - "test-index", - Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), - mapping - ); - for (int i = 0; i < arrays.length; i++) { - var array = arrays[i]; - - var indexRequest = new IndexRequest("test-index"); - indexRequest.id("my-id-" + i); - var source = jsonBuilder().startObject(); - if (array != null) { - source.startArray("field"); - for (Object arrayValue : array) { - source.value(arrayValue); - } - source.endArray(); - } else { - source.field("field").nullValue(); - } - indexRequest.source(source.endObject()); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - client().index(indexRequest).actionGet(); - - var searchRequest = new SearchRequest("test-index"); - searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); - var searchResponse = client().search(searchRequest).actionGet(); - try { - var hit = searchResponse.getHits().getHits()[0]; - assertThat(hit.getId(), equalTo("my-id-" + i)); - var sourceAsMap = hit.getSourceAsMap(); - assertThat(sourceAsMap, hasKey("field")); - var actualArray = (List) sourceAsMap.get("field"); - if (array == null) { - assertThat(actualArray, nullValue()); - } else if (array.length == 0) { - assertThat(actualArray, empty()); - } else { - assertThat(actualArray, Matchers.contains(array)); - } - } finally { - searchResponse.decRef(); - } - } - - try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { - var reader = searcher.getDirectoryReader(); - for (int i = 0; i < arrays.length; i++) { - var document = reader.storedFields().document(i); - // Verify that there is no ignored source: - Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, 
contains(expectedStoredFields)); - } else { - String[] copyExpectedStoredFields = new String[expectedStoredFields.length + 1]; - System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length); - copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_recovery_source"; - assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields)); - } - } - var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("field.offsets"); - assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); - } - } - - private void verifySyntheticObjectArray(List> documents) throws IOException { - var indexService = createIndex( - "test-index", - Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), - jsonBuilder().startObject() - .startObject("properties") - .startObject("object") - .startObject("properties") - .startObject("field") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); - for (int i = 0; i < documents.size(); i++) { - var document = documents.get(i); - - var indexRequest = new IndexRequest("test-index"); - indexRequest.id("my-id-" + i); - var source = jsonBuilder().startObject(); - source.startArray("object"); - for (Object[] arrayValue : document) { - source.startObject(); - source.array("field", arrayValue); - source.endObject(); - } - source.endArray(); - indexRequest.source(source.endObject()); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - client().index(indexRequest).actionGet(); - - var searchRequest = new SearchRequest("test-index"); - searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); - var searchResponse = client().search(searchRequest).actionGet(); - try { - var hit = searchResponse.getHits().getHits()[0]; - assertThat(hit.getId(), equalTo("my-id-" + i)); - var sourceAsMap = hit.getSourceAsMap(); - var objectArray = (List) sourceAsMap.get("object"); - for (int j = 0; j < document.size(); j++) { - var expected = document.get(j); - List actual = (List) ((Map) objectArray.get(j)).get("field"); - assertThat(actual, Matchers.contains(expected)); - } - } finally { - searchResponse.decRef(); - } - } - - indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1)); - try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { - var reader = searcher.getDirectoryReader(); - for (int i = 0; i < documents.size(); i++) { - var document = reader.storedFields().document(i); - // Verify that there is ignored source because of leaf array being wrapped by object array: - List storedFieldNames = document.getFields().stream().map(IndexableField::name).toList(); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, contains("_id", "_ignored_source")); - } else { - assertThat(storedFieldNames, containsInAnyOrder("_id", "_ignored_source", "_recovery_source")); - } - - // Verify that there is no offset field: - LeafReader leafReader = reader.leaves().get(0).reader(); - for (FieldInfo fieldInfo : leafReader.getFieldInfos()) { - String name = fieldInfo.getName(); - assertFalse("expected no field that contains [offsets] in name, but found [" + name + "]", name.contains("offsets")); - } - - var binaryDocValues = leafReader.getBinaryDocValues("object.field.offsets"); - assertThat(binaryDocValues, nullValue()); - } - } - } - - private void verifySyntheticArrayInObject(List 
documents) throws IOException { - var indexService = createIndex( - "test-index", - Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), - jsonBuilder().startObject() - .startObject("properties") - .startObject("object") - .startObject("properties") - .startObject("field") - .field("type", "keyword") - .endObject() - .endObject() - .endObject() - .endObject() - .endObject() - ); - for (int i = 0; i < documents.size(); i++) { - var arrayValue = documents.get(i); - - var indexRequest = new IndexRequest("test-index"); - indexRequest.id("my-id-" + i); - var source = jsonBuilder().startObject(); - source.startObject("object"); - source.array("field", arrayValue); - source.endObject(); - indexRequest.source(source.endObject()); - indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); - client().index(indexRequest).actionGet(); - - var searchRequest = new SearchRequest("test-index"); - searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); - var searchResponse = client().search(searchRequest).actionGet(); - try { - var hit = searchResponse.getHits().getHits()[0]; - assertThat(hit.getId(), equalTo("my-id-" + i)); - var sourceAsMap = hit.getSourceAsMap(); - var objectArray = (Map) sourceAsMap.get("object"); - - List actual = (List) objectArray.get("field"); - if (arrayValue == null) { - assertThat(actual, nullValue()); - } else if (arrayValue.length == 0) { - assertThat(actual, empty()); - } else { - assertThat(actual, Matchers.contains(arrayValue)); - } - } finally { - searchResponse.decRef(); - } - } - - indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1)); - try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { - var reader = searcher.getDirectoryReader(); - for (int i = 0; i < documents.size(); i++) { - var document = reader.storedFields().document(i); - // Verify that there is no ignored source: - Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); - if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { - assertThat(storedFieldNames, contains("_id")); - } else { - assertThat(storedFieldNames, containsInAnyOrder("_id", "_recovery_source")); - } - } - var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("object.field.offsets"); - assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); - } - } - } diff --git a/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java new file mode 100644 index 0000000000000..e074bf883ae16 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/NativeArrayIntegrationTestCase.java @@ -0,0 +1,299 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". 
+ */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.DocValuesType; +import org.apache.lucene.index.FieldInfo; +import org.apache.lucene.index.FieldInfos; +import org.apache.lucene.index.IndexableField; +import org.apache.lucene.index.LeafReader; +import org.elasticsearch.action.admin.indices.forcemerge.ForceMergeRequest; +import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.action.search.SearchRequest; +import org.elasticsearch.action.support.WriteRequest; +import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.query.IdsQueryBuilder; +import org.elasticsearch.test.ESSingleNodeTestCase; +import org.elasticsearch.xcontent.XContentBuilder; +import org.hamcrest.Matchers; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.LinkedHashSet; +import java.util.List; +import java.util.Map; +import java.util.Set; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.contains; +import static org.hamcrest.Matchers.containsInAnyOrder; +import static org.hamcrest.Matchers.empty; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.nullValue; + +public abstract class NativeArrayIntegrationTestCase extends ESSingleNodeTestCase { + + public void testSynthesizeEmptyArray() throws Exception { + var arrayValues = new Object[][] { new Object[] {} }; + verifySyntheticArray(arrayValues); + } + + public void testSynthesizeArrayRandom() throws Exception { + var arrayValues = new Object[randomInt(64)]; + for (int j = 0; j < arrayValues.length; j++) { + arrayValues[j] = NetworkAddress.format(randomIp(true)); + } + verifySyntheticArray(new Object[][] { arrayValues }); + } + + public void testSynthesizeArrayInObjectFieldRandom() throws Exception { + List documents = new ArrayList<>(); + int numDocs = randomIntBetween(8, 256); + for (int i = 0; i < numDocs; i++) { + Object[] document = new Object[randomInt(64)]; + for (int j = 0; j < document.length; j++) { + document[j] = getRandomValue(); + } + documents.add(document); + } + verifySyntheticArrayInObject(documents); + } + + protected abstract String getFieldTypeName(); + + protected abstract String getRandomValue(); + + protected void verifySyntheticArray(Object[][] arrays) throws IOException { + var mapping = jsonBuilder().startObject() + .startObject("properties") + .startObject("field") + .field("type", getFieldTypeName()) + .endObject() + .endObject() + .endObject(); + verifySyntheticArray(arrays, mapping, "_id"); + } + + protected void verifySyntheticArray(Object[][] arrays, XContentBuilder mapping, String... 
expectedStoredFields) throws IOException { + var indexService = createIndex( + "test-index", + Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), + mapping + ); + for (int i = 0; i < arrays.length; i++) { + var array = arrays[i]; + + var indexRequest = new IndexRequest("test-index"); + indexRequest.id("my-id-" + i); + var source = jsonBuilder().startObject(); + if (array != null) { + source.startArray("field"); + for (Object arrayValue : array) { + source.value(arrayValue); + } + source.endArray(); + } else { + source.field("field").nullValue(); + } + indexRequest.source(source.endObject()); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + + var searchRequest = new SearchRequest("test-index"); + searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); + var searchResponse = client().search(searchRequest).actionGet(); + try { + var hit = searchResponse.getHits().getHits()[0]; + assertThat(hit.getId(), equalTo("my-id-" + i)); + var sourceAsMap = hit.getSourceAsMap(); + assertThat(sourceAsMap, hasKey("field")); + var actualArray = (List) sourceAsMap.get("field"); + if (array == null) { + assertThat(actualArray, nullValue()); + } else if (array.length == 0) { + assertThat(actualArray, empty()); + } else { + assertThat(actualArray, Matchers.contains(array)); + } + } finally { + searchResponse.decRef(); + } + } + + try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { + var reader = searcher.getDirectoryReader(); + for (int i = 0; i < arrays.length; i++) { + var document = reader.storedFields().document(i); + // Verify that there is no ignored source: + Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { + assertThat(storedFieldNames, contains(expectedStoredFields)); + } else { + var copyExpectedStoredFields = new String[expectedStoredFields.length + 1]; + System.arraycopy(expectedStoredFields, 0, copyExpectedStoredFields, 0, expectedStoredFields.length); + copyExpectedStoredFields[copyExpectedStoredFields.length - 1] = "_ignored_source"; + assertThat(storedFieldNames, containsInAnyOrder(copyExpectedStoredFields)); + } + } + var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("field.offsets"); + assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); + } + } + + protected void verifySyntheticObjectArray(List> documents) throws IOException { + var indexService = createIndex( + "test-index", + Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), + jsonBuilder().startObject() + .startObject("properties") + .startObject("object") + .startObject("properties") + .startObject("field") + .field("type", getFieldTypeName()) + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ); + for (int i = 0; i < documents.size(); i++) { + var document = documents.get(i); + + var indexRequest = new IndexRequest("test-index"); + indexRequest.id("my-id-" + i); + var source = jsonBuilder().startObject(); + source.startArray("object"); + for (Object[] arrayValue : document) { + source.startObject(); + source.array("field", arrayValue); + source.endObject(); + } + source.endArray(); + indexRequest.source(source.endObject()); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); 
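+            // Refresh immediately so the search-by-id check just below sees each document as soon as it is indexed.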
+ client().index(indexRequest).actionGet(); + + var searchRequest = new SearchRequest("test-index"); + searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); + var searchResponse = client().search(searchRequest).actionGet(); + try { + var hit = searchResponse.getHits().getHits()[0]; + assertThat(hit.getId(), equalTo("my-id-" + i)); + var sourceAsMap = hit.getSourceAsMap(); + var objectArray = (List) sourceAsMap.get("object"); + for (int j = 0; j < document.size(); j++) { + var expected = document.get(j); + List actual = (List) ((Map) objectArray.get(j)).get("field"); + assertThat(actual, Matchers.contains(expected)); + } + } finally { + searchResponse.decRef(); + } + } + + indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1)); + try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { + var reader = searcher.getDirectoryReader(); + for (int i = 0; i < documents.size(); i++) { + var document = reader.storedFields().document(i); + // Verify that there is ignored source because of leaf array being wrapped by object array: + List storedFieldNames = document.getFields().stream().map(IndexableField::name).toList(); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { + assertThat(storedFieldNames, contains("_id", "_ignored_source")); + } else { + assertThat(storedFieldNames, containsInAnyOrder("_id", "_ignored_source", "_recovery_source")); + } + + // Verify that there is no offset field: + LeafReader leafReader = reader.leaves().get(0).reader(); + for (FieldInfo fieldInfo : leafReader.getFieldInfos()) { + String name = fieldInfo.getName(); + assertFalse("expected no field that contains [offsets] in name, but found [" + name + "]", name.contains("offsets")); + } + + var binaryDocValues = leafReader.getBinaryDocValues("object.field.offsets"); + assertThat(binaryDocValues, nullValue()); + } + } + } + + protected void verifySyntheticArrayInObject(List documents) throws IOException { + var indexService = createIndex( + "test-index", + Settings.builder().put("index.mapping.source.mode", "synthetic").put("index.mapping.synthetic_source_keep", "arrays").build(), + jsonBuilder().startObject() + .startObject("properties") + .startObject("object") + .startObject("properties") + .startObject("field") + .field("type", "keyword") + .endObject() + .endObject() + .endObject() + .endObject() + .endObject() + ); + for (int i = 0; i < documents.size(); i++) { + var arrayValue = documents.get(i); + + var indexRequest = new IndexRequest("test-index"); + indexRequest.id("my-id-" + i); + var source = jsonBuilder().startObject(); + source.startObject("object"); + source.array("field", arrayValue); + source.endObject(); + indexRequest.source(source.endObject()); + indexRequest.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE); + client().index(indexRequest).actionGet(); + + var searchRequest = new SearchRequest("test-index"); + searchRequest.source().query(new IdsQueryBuilder().addIds("my-id-" + i)); + var searchResponse = client().search(searchRequest).actionGet(); + try { + var hit = searchResponse.getHits().getHits()[0]; + assertThat(hit.getId(), equalTo("my-id-" + i)); + var sourceAsMap = hit.getSourceAsMap(); + var objectArray = (Map) sourceAsMap.get("object"); + + List actual = (List) objectArray.get("field"); + if (arrayValue == null) { + assertThat(actual, nullValue()); + } else if (arrayValue.length == 0) { + assertThat(actual, empty()); + } else { + assertThat(actual, Matchers.contains(arrayValue)); + } + } finally 
{ + searchResponse.decRef(); + } + } + + indexService.getShard(0).forceMerge(new ForceMergeRequest("test-index").maxNumSegments(1)); + try (var searcher = indexService.getShard(0).acquireSearcher(getTestName())) { + var reader = searcher.getDirectoryReader(); + for (int i = 0; i < documents.size(); i++) { + var document = reader.storedFields().document(i); + // Verify that there is no ignored source: + Set storedFieldNames = new LinkedHashSet<>(document.getFields().stream().map(IndexableField::name).toList()); + if (IndexSettings.RECOVERY_USE_SYNTHETIC_SOURCE.isEnabled()) { + assertThat(storedFieldNames, contains("_id")); + } else { + assertThat(storedFieldNames, containsInAnyOrder("_id", "_recovery_source")); + } + } + var fieldInfo = FieldInfos.getMergedFieldInfos(reader).fieldInfo("object.field.offsets"); + assertThat(fieldInfo.getDocValuesType(), equalTo(DocValuesType.SORTED)); + } + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/mapper/OffsetDocValuesLoaderTestCase.java b/server/src/test/java/org/elasticsearch/index/mapper/OffsetDocValuesLoaderTestCase.java new file mode 100644 index 0000000000000..bb12067aefd18 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/mapper/OffsetDocValuesLoaderTestCase.java @@ -0,0 +1,230 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the "Elastic License + * 2.0", the "GNU Affero General Public License v3.0 only", and the "Server Side + * Public License v 1"; you may not use this file except in compliance with, at + * your election, the "Elastic License 2.0", the "GNU Affero General Public + * License v3.0 only", or the "Server Side Public License, v 1". + */ + +package org.elasticsearch.index.mapper; + +import org.apache.lucene.index.DirectoryReader; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.xcontent.XContentBuilder; +import org.elasticsearch.xcontent.XContentType; + +import java.io.IOException; + +import static org.elasticsearch.xcontent.XContentFactory.jsonBuilder; +import static org.hamcrest.Matchers.nullValue; + +public abstract class OffsetDocValuesLoaderTestCase extends MapperServiceTestCase { + + @Override + protected Settings getIndexSettings() { + return Settings.builder() + .put("index.mapping.source.mode", "synthetic") + .put("index.mapping.synthetic_source_keep", "arrays") + .build(); + } + + public void testOffsetArrayNoDocValues() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}", + "doc_values": false + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + try (var mapperService = createMapperService(mapping)) { + var fieldMapper = mapperService.mappingLookup().getMapper("field"); + assertThat(fieldMapper.getOffsetFieldName(), nullValue()); + } + } + + public void testOffsetArrayStored() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}", + "store": true + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + ; + try (var mapperService = createMapperService(mapping)) { + var fieldMapper = mapperService.mappingLookup().getMapper("field"); + assertThat(fieldMapper.getOffsetFieldName(), nullValue()); + } + } + + public void testOffsetMultiFields() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": 
"{{type}}", + "fields": { + "sub": { + "type": "text" + } + } + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + try (var mapperService = createMapperService(mapping)) { + var fieldMapper = mapperService.mappingLookup().getMapper("field"); + assertThat(fieldMapper.getOffsetFieldName(), nullValue()); + } + } + + public void testOffsetArrayNoSyntheticSource() throws Exception { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}" + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + try (var mapperService = createMapperService(Settings.EMPTY, mapping)) { + var fieldMapper = mapperService.mappingLookup().getMapper("field"); + assertThat(fieldMapper.getOffsetFieldName(), nullValue()); + } + } + + public void testOffsetArrayNoSourceArrayKeep() throws Exception { + var settingsBuilder = Settings.builder().put("index.mapping.source.mode", "synthetic"); + String mapping; + if (randomBoolean()) { + mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}", + "synthetic_source_keep": "{{synthetic_source_keep}}" + } + } + } + } + """.replace("{{synthetic_source_keep}}", randomBoolean() ? "none" : "all").replace("{{type}}", getFieldTypeName()); + } else { + mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}" + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + if (randomBoolean()) { + settingsBuilder.put("index.mapping.synthetic_source_keep", "none"); + } + } + try (var mapperService = createMapperService(settingsBuilder.build(), mapping)) { + var fieldMapper = mapperService.mappingLookup().getMapper("field"); + assertThat(fieldMapper.getOffsetFieldName(), nullValue()); + } + } + + public void testOffsetEmptyArray() throws Exception { + verifyOffsets("{\"field\":[]}"); + } + + public void testOffsetArrayWithNulls() throws Exception { + verifyOffsets("{\"field\":[null,null,null]}"); + verifyOffsets("{\"field\":[null,[null],null]}", "{\"field\":[null,null,null]}"); + } + + public void testOffsetArrayRandom() throws Exception { + StringBuilder values = new StringBuilder(); + int numValues = randomIntBetween(0, 256); + for (int i = 0; i < numValues; i++) { + if (randomInt(10) == 1) { + values.append("null"); + } else { + String randomValue = randomValue(); + values.append('"').append(randomValue).append('"'); + } + if (i != (numValues - 1)) { + values.append(','); + } + } + verifyOffsets("{\"field\":[" + values + "]}"); + } + + protected abstract String getFieldTypeName(); + + protected abstract String randomValue(); + + protected void verifyOffsets(String source) throws IOException { + verifyOffsets(source, source); + } + + protected void verifyOffsets(String source, String expectedSource) throws IOException { + String mapping = """ + { + "_doc": { + "properties": { + "field": { + "type": "{{type}}" + } + } + } + } + """.replace("{{type}}", getFieldTypeName()); + verifyOffsets(mapping, source, expectedSource); + } + + private void verifyOffsets(String mapping, String source, String expectedSource) throws IOException { + try (var mapperService = createMapperService(mapping)) { + var mapper = mapperService.documentMapper(); + + try (var directory = newDirectory()) { + var iw = indexWriterForSyntheticSource(directory); + var doc = mapper.parse(new SourceToParse("_id", new BytesArray(source), XContentType.JSON)); + doc.updateSeqID(0, 0); + doc.version().setLongValue(0); + iw.addDocuments(doc.docs()); + iw.close(); + try (var indexReader = 
wrapInMockESDirectoryReader(DirectoryReader.open(directory))) { + FieldMapper fieldMapper = (FieldMapper) mapper.mappers().getMapper("field"); + var syntheticSourceLoader = fieldMapper.syntheticFieldLoader(); + var leafReader = indexReader.leaves().getFirst().reader(); + var docValueLoader = syntheticSourceLoader.docValuesLoader(leafReader, new int[] { 0 }); + assertTrue(docValueLoader.advanceToDoc(0)); + assertTrue(syntheticSourceLoader.hasValue()); + XContentBuilder builder = jsonBuilder().startObject(); + syntheticSourceLoader.write(builder); + builder.endObject(); + + var actual = Strings.toString(builder); + assertEquals(expectedSource, actual); + } + } + } + } + +} From f929270431114d6a6dc1034111b2a760ce75d2e0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Lorenzo=20Dematt=C3=A9?= Date: Tue, 25 Feb 2025 15:09:30 +0100 Subject: [PATCH 13/20] Fix how we suppress logs for self-tests (#123361) --- .../bootstrap/EntitlementBootstrap.java | 12 +++-- .../EntitlementInitialization.java | 3 +- .../runtime/policy/PolicyManager.java | 14 ++--- .../runtime/policy/PolicyManagerTests.java | 42 ++++++++++----- .../bootstrap/Elasticsearch.java | 51 ++++++++++--------- 5 files changed, 74 insertions(+), 48 deletions(-) diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java index 06e985913c9b4..6a80ec75751c5 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/bootstrap/EntitlementBootstrap.java @@ -24,6 +24,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Map; +import java.util.Set; import java.util.function.Function; import java.util.stream.Stream; @@ -42,7 +43,8 @@ public record BootstrapArgs( Path libDir, Path logsDir, Path tempDir, - Path pidFile + Path pidFile, + Set> suppressFailureLogClasses ) { public BootstrapArgs { requireNonNull(pluginPolicies); @@ -58,6 +60,7 @@ public record BootstrapArgs( requireNonNull(libDir); requireNonNull(logsDir); requireNonNull(tempDir); + requireNonNull(suppressFailureLogClasses); } } @@ -82,6 +85,7 @@ public static BootstrapArgs bootstrapArgs() { * @param tempDir the temp directory for Elasticsearch * @param logsDir the log directory for Elasticsearch * @param pidFile path to a pid file for Elasticsearch, or {@code null} if one was not specified + * @param suppressFailureLogClasses classes for which we do not need or want to log Entitlements failures */ public static void bootstrap( Map pluginPolicies, @@ -94,7 +98,8 @@ public static void bootstrap( Path libDir, Path logsDir, Path tempDir, - Path pidFile + Path pidFile, + Set> suppressFailureLogClasses ) { logger.debug("Loading entitlement agent"); if (EntitlementBootstrap.bootstrapArgs != null) { @@ -111,7 +116,8 @@ public static void bootstrap( libDir, logsDir, tempDir, - pidFile + pidFile, + suppressFailureLogClasses ); exportInitializationToAgent(); loadAgent(findAgentJar()); diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index d3d09e0f84aee..055660d54079b 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ 
b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -272,7 +272,8 @@ private static PolicyManager createPolicyManager() { resolver, AGENTS_PACKAGE_NAME, ENTITLEMENTS_MODULE, - pathLookup + pathLookup, + bootstrapArgs.suppressFailureLogClasses() ); } diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java index b6296fe5d4713..cf3775474b79a 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/runtime/policy/PolicyManager.java @@ -11,7 +11,6 @@ import org.elasticsearch.core.Strings; import org.elasticsearch.core.SuppressForbidden; -import org.elasticsearch.entitlement.bootstrap.EntitlementBootstrap; import org.elasticsearch.entitlement.instrumentation.InstrumentationService; import org.elasticsearch.entitlement.runtime.api.NotEntitledException; import org.elasticsearch.entitlement.runtime.policy.entitlements.CreateClassLoaderEntitlement; @@ -114,6 +113,7 @@ ModuleEntitlements policyEntitlements(String componentName, List en private final Function, String> pluginResolver; private final PathLookup pathLookup; private final FileAccessTree defaultFileAccess; + private final Set> mutedClasses; public static final String ALL_UNNAMED = "ALL-UNNAMED"; @@ -150,7 +150,8 @@ public PolicyManager( Function, String> pluginResolver, String apmAgentPackageName, Module entitlementsModule, - PathLookup pathLookup + PathLookup pathLookup, + Set> suppressFailureLogClasses ) { this.serverEntitlements = buildScopeEntitlementsMap(requireNonNull(serverPolicy)); this.apmAgentEntitlements = apmAgentEntitlements; @@ -162,6 +163,7 @@ public PolicyManager( this.entitlementsModule = entitlementsModule; this.pathLookup = requireNonNull(pathLookup); this.defaultFileAccess = FileAccessTree.of(FilesEntitlement.EMPTY, pathLookup); + this.mutedClasses = suppressFailureLogClasses; for (var e : serverEntitlements.entrySet()) { validateEntitlementsPerModule(SERVER_COMPONENT_NAME, e.getKey(), e.getValue()); @@ -386,7 +388,7 @@ public void checkAllNetworkAccess(Class callerClass) { checkFlagEntitlement(classEntitlements, OutboundNetworkEntitlement.class, requestingClass, callerClass); } - private static void checkFlagEntitlement( + private void checkFlagEntitlement( ModuleEntitlements classEntitlements, Class entitlementClass, Class requestingClass, @@ -446,10 +448,10 @@ public void checkWriteProperty(Class callerClass, String property) { ); } - private static void notEntitled(String message, Class callerClass) { + private void notEntitled(String message, Class callerClass) { var exception = new NotEntitledException(message); - // don't log self tests in EntitlementBootstrap - if (EntitlementBootstrap.class.equals(callerClass) == false) { + // Don't emit a log for muted classes, e.g. 
classes containing self tests + if (mutedClasses.contains(callerClass) == false) { logger.warn(message, exception); } throw exception; diff --git a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java index c9fade1a48219..5a65ea81d0a0e 100644 --- a/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java +++ b/libs/entitlement/src/test/java/org/elasticsearch/entitlement/runtime/policy/PolicyManagerTests.java @@ -87,7 +87,8 @@ public void testGetEntitlementsThrowsOnMissingPluginUnnamedModule() { c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Any class from the current module (unnamed) will do @@ -111,7 +112,8 @@ public void testGetEntitlementsThrowsOnMissingPolicyForPlugin() { c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Any class from the current module (unnamed) will do @@ -131,7 +133,8 @@ public void testGetEntitlementsFailureIsCached() { c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Any class from the current module (unnamed) will do @@ -156,7 +159,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginUnnamedModule() { c -> "plugin2", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Any class from the current module (unnamed) will do @@ -174,7 +178,8 @@ public void testGetEntitlementsThrowsOnMissingPolicyForServer() throws ClassNotF c -> null, TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Tests do not run modular, so we cannot use a server class. @@ -204,7 +209,8 @@ public void testGetEntitlementsReturnsEntitlementsForServerModule() throws Class c -> null, TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Tests do not run modular, so we cannot use a server class. @@ -230,7 +236,8 @@ public void testGetEntitlementsReturnsEntitlementsForPluginModule() throws IOExc c -> "mock-plugin", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); var layer = createLayerForJar(jar, "org.example.plugin"); @@ -249,7 +256,8 @@ public void testGetEntitlementsResultIsCached() { c -> "plugin2", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); // Any class from the current module (unnamed) will do @@ -308,7 +316,8 @@ public void testAgentsEntitlements() throws IOException, ClassNotFoundException c -> c.getPackageName().startsWith(TEST_AGENTS_PACKAGE_NAME) ? 
null : "test", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); ModuleEntitlements agentsEntitlements = policyManager.getEntitlements(TestAgent.class); assertThat(agentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(true)); @@ -336,7 +345,8 @@ public void testDuplicateEntitlements() { c -> "test", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ) ); assertEquals( @@ -353,7 +363,8 @@ public void testDuplicateEntitlements() { c -> "test", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ) ); assertEquals( @@ -387,7 +398,8 @@ public void testDuplicateEntitlements() { c -> "plugin1", TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ) ); assertEquals( @@ -407,7 +419,8 @@ public void testPluginResolverOverridesAgents() { c -> "test", // Insist that the class is in a plugin TEST_AGENTS_PACKAGE_NAME, NO_ENTITLEMENTS_MODULE, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); ModuleEntitlements notAgentsEntitlements = policyManager.getEntitlements(TestAgent.class); assertThat(notAgentsEntitlements.hasEntitlement(CreateClassLoaderEntitlement.class), is(false)); @@ -428,7 +441,8 @@ private static PolicyManager policyManager(String agentsPackageName, Module enti c -> "test", agentsPackageName, entitlementsModule, - TEST_PATH_LOOKUP + TEST_PATH_LOOKUP, + Set.of() ); } diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 05072ae940ae4..5ccb4af45ade2 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -256,9 +256,10 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { nodeEnv.libDir(), nodeEnv.logsDir(), nodeEnv.tmpDir(), - args.pidFile() + args.pidFile(), + Set.of(EntitlementSelfTester.class) ); - entitlementSelfTest(); + EntitlementSelfTester.entitlementSelfTest(); } else { assert RuntimeVersionFeature.isSecurityManagerAvailable(); // no need to explicitly enable native access for legacy code @@ -275,31 +276,33 @@ private static void initPhase2(Bootstrap bootstrap) throws IOException { bootstrap.setPluginsLoader(pluginsLoader); } - // check entitlements were loaded correctly. note this must be outside the entitlements lib. - private static void entitlementSelfTest() { - ensureCannotStartProcess(ProcessBuilder::start); - // Try again with reflection - ensureCannotStartProcess(Elasticsearch::reflectiveStartProcess); - } + private static class EntitlementSelfTester { + // check entitlements were loaded correctly. note this must be outside the entitlements lib. 
+ private static void entitlementSelfTest() { + ensureCannotStartProcess(ProcessBuilder::start); + // Try again with reflection + ensureCannotStartProcess(EntitlementSelfTester::reflectiveStartProcess); + } - private static void ensureCannotStartProcess(CheckedConsumer startProcess) { - try { - // The command doesn't matter; it doesn't even need to exist - startProcess.accept(new ProcessBuilder("")); - } catch (NotEntitledException e) { - return; - } catch (Exception e) { - throw new IllegalStateException("Failed entitlement protection self-test", e); + private static void ensureCannotStartProcess(CheckedConsumer startProcess) { + try { + // The command doesn't matter; it doesn't even need to exist + startProcess.accept(new ProcessBuilder("")); + } catch (NotEntitledException e) { + return; + } catch (Exception e) { + throw new IllegalStateException("Failed entitlement protection self-test", e); + } + throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); } - throw new IllegalStateException("Entitlement protection self-test was incorrectly permitted"); - } - private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { - try { - var start = ProcessBuilder.class.getMethod("start"); - start.invoke(pb); - } catch (InvocationTargetException e) { - throw (Exception) e.getCause(); + private static void reflectiveStartProcess(ProcessBuilder pb) throws Exception { + try { + var start = ProcessBuilder.class.getMethod("start"); + start.invoke(pb); + } catch (InvocationTargetException e) { + throw (Exception) e.getCause(); + } } } From e8438490ea9605445ca0bc2f4ecf31a880f709a8 Mon Sep 17 00:00:00 2001 From: Ying Mao Date: Tue, 25 Feb 2025 09:14:20 -0500 Subject: [PATCH 14/20] Updates to allow using Cohere binary embedding response in semantic search queries. 
(#121827) * wip * wip * [CI] Auto commit changes from spotless * updating tests * [CI] Auto commit changes from spotless * Update docs/changelog/121827.yaml * Updates after the refactor * [CI] Auto commit changes from spotless * Updating error message --------- Co-authored-by: elasticsearchmachine --- docs/changelog/121827.yaml | 6 ++ .../results/TextEmbeddingBitResults.java | 3 +- .../mapper/SemanticTextFieldMapper.java | 7 +- .../services/cohere/CohereService.java | 9 ++- .../EmbeddingRequestChunkerTests.java | 73 +++++++++++++++++++ .../results/TextEmbeddingBitResultsTests.java | 11 +++ .../TextEmbeddingByteResultsTests.java | 11 +++ .../results/TextEmbeddingResultsTests.java | 11 +++ .../services/cohere/CohereServiceTests.java | 18 ++++- 9 files changed, 140 insertions(+), 9 deletions(-) create mode 100644 docs/changelog/121827.yaml diff --git a/docs/changelog/121827.yaml b/docs/changelog/121827.yaml new file mode 100644 index 0000000000000..11c9c201655a7 --- /dev/null +++ b/docs/changelog/121827.yaml @@ -0,0 +1,6 @@ +pr: 121827 +summary: Updates to allow using Cohere binary embedding response in semantic search + queries +area: Machine Learning +type: bug +issues: [] diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingBitResults.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingBitResults.java index ba4a770b04840..a8f676bf41ce3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingBitResults.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/inference/results/TextEmbeddingBitResults.java @@ -55,7 +55,8 @@ public int getFirstEmbeddingSize() { if (embeddings.isEmpty()) { throw new IllegalStateException("Embeddings list is empty"); } - return embeddings.getFirst().values().length; + // bit embeddings are encoded as bytes so convert this to bits + return Byte.SIZE * embeddings.getFirst().values().length; } @Override diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java index 3bebd8086d792..0ecc76504107e 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/mapper/SemanticTextFieldMapper.java @@ -709,9 +709,12 @@ yield new SparseVectorQueryBuilder( MlTextEmbeddingResults textEmbeddingResults = (MlTextEmbeddingResults) inferenceResults; float[] inference = textEmbeddingResults.getInferenceAsFloat(); - if (inference.length != modelSettings.dimensions()) { + var inferenceLength = modelSettings.elementType() == DenseVectorFieldMapper.ElementType.BIT + ? 
inference.length * Byte.SIZE + : inference.length; + if (inferenceLength != modelSettings.dimensions()) { throw new IllegalArgumentException( - generateDimensionCountMismatchMessage(inference.length, modelSettings.dimensions()) + generateDimensionCountMismatchMessage(inferenceLength, modelSettings.dimensions()) ); } diff --git a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java index 08ab5f1ba86d0..c951a008df0f5 100644 --- a/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java +++ b/x-pack/plugin/inference/src/main/java/org/elasticsearch/xpack/inference/services/cohere/CohereService.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.inference.services.ServiceComponents; import org.elasticsearch.xpack.inference.services.ServiceUtils; import org.elasticsearch.xpack.inference.services.cohere.completion.CohereCompletionModel; +import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingType; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsModel; import org.elasticsearch.xpack.inference.services.cohere.embeddings.CohereEmbeddingsServiceSettings; import org.elasticsearch.xpack.inference.services.cohere.rerank.CohereRerankModel; @@ -313,7 +314,7 @@ public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { if (model instanceof CohereEmbeddingsModel embeddingsModel) { var serviceSettings = embeddingsModel.getServiceSettings(); var similarityFromModel = serviceSettings.similarity(); - var similarityToUse = similarityFromModel == null ? defaultSimilarity() : similarityFromModel; + var similarityToUse = similarityFromModel == null ? defaultSimilarity(serviceSettings.getEmbeddingType()) : similarityFromModel; var updatedServiceSettings = new CohereEmbeddingsServiceSettings( new CohereServiceSettings( @@ -341,7 +342,11 @@ public Model updateModelWithEmbeddingDetails(Model model, int embeddingSize) { * * @return The default similarity. 
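+     * For {@code BIT} and {@code BINARY} embedding types the default is {@code L2_NORM};
+     * every other embedding type keeps {@code DOT_PRODUCT} as its default.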
*/ - static SimilarityMeasure defaultSimilarity() { + static SimilarityMeasure defaultSimilarity(CohereEmbeddingType embeddingType) { + if (embeddingType == CohereEmbeddingType.BIT || embeddingType == CohereEmbeddingType.BINARY) { + return SimilarityMeasure.L2_NORM; + } + return SimilarityMeasure.DOT_PRODUCT; } diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java index 23912b4000d02..7cdae8d51ce0b 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/chunking/EmbeddingRequestChunkerTests.java @@ -13,6 +13,7 @@ import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceEmbedding; import org.elasticsearch.xpack.core.inference.results.ChunkedInferenceError; import org.elasticsearch.xpack.core.inference.results.SparseEmbeddingResults; +import org.elasticsearch.xpack.core.inference.results.TextEmbeddingBitResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingByteResults; import org.elasticsearch.xpack.core.inference.results.TextEmbeddingFloatResults; import org.elasticsearch.xpack.core.ml.search.WeightedToken; @@ -377,6 +378,78 @@ public void testMergingListener_Byte() { } } + public void testMergingListener_Bit() { + int batchSize = 5; + int chunkSize = 20; + int overlap = 0; + // passage will be chunked into batchSize + 1 parts + // and spread over 2 batch requests + int numberOfWordsInPassage = (chunkSize * batchSize) + 5; + + var passageBuilder = new StringBuilder(); + for (int i = 0; i < numberOfWordsInPassage; i++) { + passageBuilder.append("passage_input").append(i).append(" "); // chunk on whitespace + } + List inputs = List.of("1st small", passageBuilder.toString(), "2nd small", "3rd small"); + + var finalListener = testListener(); + var batches = new EmbeddingRequestChunker(inputs, batchSize, chunkSize, overlap).batchRequestsWithListeners(finalListener); + assertThat(batches, hasSize(2)); + + // 4 inputs in 2 batches + { + var embeddings = new ArrayList(); + for (int i = 0; i < batchSize; i++) { + embeddings.add(new TextEmbeddingByteResults.Embedding(new byte[] { randomByte() })); + } + batches.get(0).listener().onResponse(new TextEmbeddingBitResults(embeddings)); + } + { + var embeddings = new ArrayList(); + for (int i = 0; i < 4; i++) { // 4 requests in the 2nd batch + embeddings.add(new TextEmbeddingByteResults.Embedding(new byte[] { randomByte() })); + } + batches.get(1).listener().onResponse(new TextEmbeddingBitResults(embeddings)); + } + + assertNotNull(finalListener.results); + assertThat(finalListener.results, hasSize(4)); + { + var chunkedResult = finalListener.results.get(0); + assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); + var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; + assertThat(chunkedByteResult.chunks(), hasSize(1)); + assertEquals("1st small", chunkedByteResult.chunks().get(0).matchedText()); + } + { + // this is the large input split in multiple chunks + var chunkedResult = finalListener.results.get(1); + assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); + var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; + assertThat(chunkedByteResult.chunks(), hasSize(6)); + 
assertThat(chunkedByteResult.chunks().get(0).matchedText(), startsWith("passage_input0 ")); + assertThat(chunkedByteResult.chunks().get(1).matchedText(), startsWith(" passage_input20 ")); + assertThat(chunkedByteResult.chunks().get(2).matchedText(), startsWith(" passage_input40 ")); + assertThat(chunkedByteResult.chunks().get(3).matchedText(), startsWith(" passage_input60 ")); + assertThat(chunkedByteResult.chunks().get(4).matchedText(), startsWith(" passage_input80 ")); + assertThat(chunkedByteResult.chunks().get(5).matchedText(), startsWith(" passage_input100 ")); + } + { + var chunkedResult = finalListener.results.get(2); + assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); + var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; + assertThat(chunkedByteResult.chunks(), hasSize(1)); + assertEquals("2nd small", chunkedByteResult.chunks().get(0).matchedText()); + } + { + var chunkedResult = finalListener.results.get(3); + assertThat(chunkedResult, instanceOf(ChunkedInferenceEmbedding.class)); + var chunkedByteResult = (ChunkedInferenceEmbedding) chunkedResult; + assertThat(chunkedByteResult.chunks(), hasSize(1)); + assertEquals("3rd small", chunkedByteResult.chunks().get(0).matchedText()); + } + } + public void testMergingListener_Sparse() { int batchSize = 4; int chunkSize = 10; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingBitResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingBitResultsTests.java index 8d3ead44f2335..fb3203f633ff2 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingBitResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingBitResultsTests.java @@ -105,6 +105,17 @@ public void testTransformToCoordinationFormat() { ); } + public void testGetFirstEmbeddingSize() { + var firstEmbeddingSize = new TextEmbeddingBitResults( + List.of( + new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }), + new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 }) + ) + ).getFirstEmbeddingSize(); + + assertThat(firstEmbeddingSize, is(16)); + } + @Override protected Writeable.Reader instanceReader() { return TextEmbeddingBitResults::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java index aaa823fda0723..945eadd67d1ff 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingByteResultsTests.java @@ -104,6 +104,17 @@ public void testTransformToCoordinationFormat() { ); } + public void testGetFirstEmbeddingSize() { + var firstEmbeddingSize = new TextEmbeddingByteResults( + List.of( + new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 23, (byte) 24 }), + new TextEmbeddingByteResults.Embedding(new byte[] { (byte) 25, (byte) 26 }) + ) + ).getFirstEmbeddingSize(); + + assertThat(firstEmbeddingSize, is(2)); + } + @Override protected Writeable.Reader instanceReader() { return TextEmbeddingByteResults::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java 
b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java index 77cbeaea68c39..c56defb693825 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/results/TextEmbeddingResultsTests.java @@ -105,6 +105,17 @@ public void testTransformToCoordinationFormat() { ); } + public void testGetFirstEmbeddingSize() { + var firstEmbeddingSize = new TextEmbeddingFloatResults( + List.of( + new TextEmbeddingFloatResults.Embedding(new float[] { 0.1F, 0.2F }), + new TextEmbeddingFloatResults.Embedding(new float[] { 0.3F, 0.4F }) + ) + ).getFirstEmbeddingSize(); + + assertThat(firstEmbeddingSize, is(2)); + } + @Override protected Writeable.Reader instanceReader() { return TextEmbeddingFloatResults::new; diff --git a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java index 8549871d67ffd..bd1dbc201f52e 100644 --- a/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java +++ b/x-pack/plugin/inference/src/test/java/org/elasticsearch/xpack/inference/services/cohere/CohereServiceTests.java @@ -1100,6 +1100,7 @@ private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure si try (var service = new CohereService(senderFactory, createWithEmptySettings(threadPool))) { var embeddingSize = randomNonNegativeInt(); + var embeddingType = randomFrom(CohereEmbeddingType.values()); var model = CohereEmbeddingsModelTests.createModel( randomAlphaOfLength(10), randomAlphaOfLength(10), @@ -1107,13 +1108,15 @@ private void testUpdateModelWithEmbeddingDetails_Successful(SimilarityMeasure si randomNonNegativeInt(), randomNonNegativeInt(), randomAlphaOfLength(10), - randomFrom(CohereEmbeddingType.values()), + embeddingType, similarityMeasure ); Model updatedModel = service.updateModelWithEmbeddingDetails(model, embeddingSize); - SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null ? CohereService.defaultSimilarity() : similarityMeasure; + SimilarityMeasure expectedSimilarityMeasure = similarityMeasure == null + ? 
CohereService.defaultSimilarity(embeddingType) + : similarityMeasure; assertEquals(expectedSimilarityMeasure, updatedModel.getServiceSettings().similarity()); assertEquals(embeddingSize, updatedModel.getServiceSettings().dimensions().intValue()); } @@ -1590,8 +1593,15 @@ public void testChunkedInfer_BatchesCalls_Bytes() throws IOException { } } - public void testDefaultSimilarity() { - assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity()); + public void testDefaultSimilarity_BinaryEmbedding() { + assertEquals(SimilarityMeasure.L2_NORM, CohereService.defaultSimilarity(CohereEmbeddingType.BINARY)); + assertEquals(SimilarityMeasure.L2_NORM, CohereService.defaultSimilarity(CohereEmbeddingType.BIT)); + } + + public void testDefaultSimilarity_NotBinaryEmbedding() { + assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity(CohereEmbeddingType.FLOAT)); + assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity(CohereEmbeddingType.BYTE)); + assertEquals(SimilarityMeasure.DOT_PRODUCT, CohereService.defaultSimilarity(CohereEmbeddingType.INT8)); } public void testInfer_StreamRequest() throws Exception { From ae6474db630a5686f8ff221a4b435a6920a184b8 Mon Sep 17 00:00:00 2001 From: Kathleen DeRusso Date: Tue, 25 Feb 2025 10:02:50 -0500 Subject: [PATCH 15/20] Deprecate Behavioral Analytics CRUD apis (#122960) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Deprecate Behavioral Analytics CRUD APIs * Add allowed warning for REST Compatibility tests * Update docs/changelog/122960.yaml * Update changelog * Update docs to add deprecation flags and fix failing tests * Update changelog * Update changelog again * Update docs formatting Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> * Skip asciidoc test --------- Co-authored-by: Efe Gürkan YALAMAN Co-authored-by: Liam Thompson <32779855+leemthompo@users.noreply.github.com> Co-authored-by: Efe Gürkan YALAMAN --- docs/changelog/122960.yaml | 10 + .../apis/delete-analytics-collection.asciidoc | 11 +- .../behavioral-analytics/apis/index.asciidoc | 1 + .../apis/list-analytics-collection.asciidoc | 25 +-- .../post-analytics-collection-event.asciidoc | 16 +- .../apis/put-analytics-collection.asciidoc | 10 +- .../behavioral-analytics-api.asciidoc | 3 + .../behavioral-analytics-cors.asciidoc | 4 +- ...avioral-analytics-event-reference.asciidoc | 2 + .../behavioral-analytics-event.asciidoc | 2 + .../behavioral-analytics-overview.asciidoc | 3 + .../behavioral-analytics-start.asciidoc | 2 + ...plication.delete_behavioral_analytics.json | 4 + ..._application.get_behavioral_analytics.json | 12 +- ...ation.post_behavioral_analytics_event.json | 4 + ..._application.put_behavioral_analytics.json | 4 + x-pack/plugin/ent-search/qa/rest/build.gradle | 8 + .../rest-api-spec/test/entsearch/20_usage.yml | 6 + .../10_behavioral_analytics_list.yml | 26 ++- .../analytics/20_behavioral_analytics_put.yml | 14 ++ .../30_behavioral_analytics_delete.yml | 18 ++ .../40_behavioral_analytics_event_post.yml | 178 ++++++++++++------ ..._search_application_search_with_apikey.yml | 56 +++--- .../xpack/application/EnterpriseSearch.java | 4 + .../analytics/AnalyticsCollection.java | 2 + .../AnalyticsCollectionResolver.java | 2 + .../analytics/AnalyticsCollectionService.java | 2 + .../analytics/AnalyticsConstants.java | 4 + .../AnalyticsEventIngestService.java | 2 + .../analytics/AnalyticsTemplateRegistry.java | 4 + .../DeleteAnalyticsCollectionAction.java | 4 + 
.../action/GetAnalyticsCollectionAction.java | 4 + .../action/PostAnalyticsEventAction.java | 4 + .../action/PutAnalyticsCollectionAction.java | 4 + .../RestDeleteAnalyticsCollectionAction.java | 4 + .../RestGetAnalyticsCollectionAction.java | 4 + .../action/RestPostAnalyticsEventAction.java | 4 + .../RestPutAnalyticsCollectionAction.java | 4 + ...nsportDeleteAnalyticsCollectionAction.java | 11 ++ ...TransportGetAnalyticsCollectionAction.java | 11 ++ .../TransportPostAnalyticsEventAction.java | 13 +- ...TransportPutAnalyticsCollectionAction.java | 15 +- .../analytics/event/AnalyticsEvent.java | 2 + .../event/AnalyticsEventFactory.java | 2 + .../parser/event/PageViewAnalyticsEvent.java | 4 + .../parser/event/SearchAnalyticsEvent.java | 4 + .../event/SearchClickAnalyticsEvent.java | 4 + .../field/DocumentAnalyticsEventField.java | 4 + .../parser/field/PageAnalyticsEventField.java | 4 + .../field/PaginationAnalyticsEventField.java | 4 + .../field/SearchAnalyticsEventField.java | 4 + .../SearchFiltersAnalyticsEventField.java | 4 + .../SearchResultAnalyticsEventField.java | 4 + .../field/SessionAnalyticsEventField.java | 4 + .../field/SortOrderAnalyticsEventField.java | 4 + .../parser/field/UserAnalyticsEventField.java | 4 + .../ingest/AnalyticsEventEmitter.java | 4 + .../ingest/AnalyticsEventIngestConfig.java | 2 + .../ingest/BulkProcessorFactory.java | 2 + 59 files changed, 441 insertions(+), 140 deletions(-) create mode 100644 docs/changelog/122960.yaml diff --git a/docs/changelog/122960.yaml b/docs/changelog/122960.yaml new file mode 100644 index 0000000000000..5745c41b007be --- /dev/null +++ b/docs/changelog/122960.yaml @@ -0,0 +1,10 @@ +pr: 122960 +summary: Deprecate Behavioral Analytics CRUD apis +area: Search +type: deprecation +issues: [ ] +deprecation: + title: Deprecate Behavioral Analytics CRUD apis + area: Search + details: Behavioral Analytics has been deprecated as of 9.0.0 and will be removed in a future release. The APIs will still work for now, but will emit warning headers that the API has been deprecated. + impact: Behavioral Analytics has been deprecated as of 9.0.0 and will be removed in a future release. diff --git a/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc index 19c1b5437ef0c..1c6c39ea137e9 100644 --- a/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/delete-analytics-collection.asciidoc @@ -2,6 +2,7 @@ [[delete-analytics-collection]] === Delete Analytics Collection +deprecated:[9.0.0] beta::[] ++++ @@ -14,15 +15,6 @@ beta::[] For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. -- -//// -[source,console] ----- -PUT _application/analytics/my_analytics_collection ----- -// TESTSETUP - -//// - Removes a <> Collection and its associated data stream. 
[[delete-analytics-collection-request]] @@ -59,3 +51,4 @@ The following example deletes the Analytics Collection named `my_analytics_colle ---- DELETE _application/analytics/my_analytics_collection/ ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] diff --git a/docs/reference/behavioral-analytics/apis/index.asciidoc b/docs/reference/behavioral-analytics/apis/index.asciidoc index 6dc12599c2297..1fdcd0f1afc9a 100644 --- a/docs/reference/behavioral-analytics/apis/index.asciidoc +++ b/docs/reference/behavioral-analytics/apis/index.asciidoc @@ -1,6 +1,7 @@ [[behavioral-analytics-apis]] == Behavioral Analytics APIs +deprecated:[9.0.0] beta::[] ++++ diff --git a/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc index 46ee8296f3ebe..c0892529bb58f 100644 --- a/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/list-analytics-collection.asciidoc @@ -2,6 +2,7 @@ [[list-analytics-collection]] === List Analytics Collections +deprecated:[9.0.0] beta::[] ++++ @@ -14,22 +15,6 @@ beta::[] For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. -- -//// -[source,console] ----- -PUT _application/analytics/my_analytics_collection -PUT _application/analytics/my_analytics_collection2 ----- -// TESTSETUP - -[source,console] ----- -DELETE _application/analytics/my_analytics_collection -DELETE _application/analytics/my_analytics_collection2 ----- -// TEARDOWN -//// - Returns information about <> Collections. [[list-analytics-collection-request]] @@ -46,8 +31,9 @@ Requires the `manage_behavioral_analytics` cluster privilege. ==== {api-path-parms-title} ``:: -(optional, string) -Criteria is used to find a matching analytics collection. This could be the name of the collection or a pattern to match multiple. If not specified, will return all analytics collections. +(optional, string) Criteria is used to find a matching analytics collection. +This could be the name of the collection or a pattern to match multiple. +If not specified, will return all analytics collections. 
[[list-analytics-collection-response-codes]] ==== {api-response-codes-title} @@ -66,6 +52,7 @@ The following example lists all configured Analytics Collections: ---- GET _application/analytics/ ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] A sample response: @@ -91,6 +78,7 @@ The following example returns the Analytics Collection that matches `my_analytic ---- GET _application/analytics/my_analytics_collection ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] A sample response: @@ -111,6 +99,7 @@ The following example returns all Analytics Collections prefixed with `my`: ---- GET _application/analytics/my* ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] A sample response: diff --git a/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc b/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc index 60985cd50d3d1..aad246872e927 100644 --- a/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc +++ b/docs/reference/behavioral-analytics/apis/post-analytics-collection-event.asciidoc @@ -2,6 +2,7 @@ [[post-analytics-collection-event]] === Post Event to an Analytics Collection +deprecated:[9.0.0] beta::[] ++++ @@ -14,20 +15,6 @@ beta::[] For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. -- -//// -[source,console] ----- -PUT _application/analytics/my_analytics_collection ----- -// TESTSETUP - -[source,console] ----- -DELETE _application/analytics/my_analytics_collection ----- -// TEARDOWN -//// - Post an event to a <> Collection. [[post-analytics-collection-event-request]] @@ -105,3 +92,4 @@ POST _application/analytics/my_analytics_collection/event/search_click } } ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] diff --git a/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc b/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc index 412277afa2076..0547630db9543 100644 --- a/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc +++ b/docs/reference/behavioral-analytics/apis/put-analytics-collection.asciidoc @@ -2,6 +2,7 @@ [[put-analytics-collection]] === Put Analytics Collection +deprecated:[9.0.0] beta::[] ++++ @@ -14,14 +15,6 @@ beta::[] For the most up-to-date API details, refer to {api-es}/group/endpoint-analytics[Behavioral analytics APIs]. -- -//// -[source,console] ----- -DELETE _application/analytics/my_analytics_collection ----- -// TEARDOWN -//// - Creates a <> Collection. 
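Creating a collection is a body-less `PUT`, after which events can be posted to it. A minimal sketch, again assuming an unsecured local cluster and using the JDK HTTP client; the `page_view` payload carries only the fields the event tests later in this patch treat as required (`session.id`, `user.id`, `page.url`):

[source,java]
----
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class PostPageViewEventExample {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String base = "http://localhost:9200/_application/analytics/my_analytics_collection";

        // Create the collection (no request body).
        HttpRequest put = HttpRequest.newBuilder()
            .uri(URI.create(base))
            .PUT(HttpRequest.BodyPublishers.noBody())
            .build();
        System.out.println(client.send(put, HttpResponse.BodyHandlers.ofString()).body());

        // Post a minimal page_view event.
        String event = """
            { "session": { "id": "123" }, "user": { "id": "456" }, "page": { "url": "https://www.elastic.co" } }
            """;
        HttpRequest post = HttpRequest.newBuilder()
            .uri(URI.create(base + "/event/page_view"))
            .header("Content-Type", "application/json")
            .POST(HttpRequest.BodyPublishers.ofString(event))
            .build();
        System.out.println(client.send(post, HttpResponse.BodyHandlers.ofString()).statusCode());
    }
}
----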
[[put-analytics-collection-request]] @@ -55,3 +48,4 @@ The following example creates a new Analytics Collection called `my_analytics_co ---- PUT _application/analytics/my_analytics_collection ---- +// TEST[skip:Behavioral Analytics APIs emit deprecation warnings and will not be updated] diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-api.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-api.asciidoc index 1c406b8c8dfdf..715be30924fd7 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-api.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-api.asciidoc @@ -1,9 +1,12 @@ [[behavioral-analytics-api]] === Behavioral Analytics API overview + ++++ API overview ++++ +deprecated:[9.0.0] + This page outlines all the APIs available for behavioral analytics and links to their documentation. [discrete] diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-cors.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-cors.asciidoc index d67e47d1b02f0..3dd75e06e3a62 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-cors.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-cors.asciidoc @@ -4,6 +4,8 @@ Set up CORs ++++ +deprecated:[9.0.0] + Behavioral Analytics sends events directly to the {es} API. This means that the browser makes requests to the {es} API directly. {es} supports https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS[Cross-Origin Resource Sharing (CORS)^], but this feature is disabled by default. @@ -43,4 +45,4 @@ On Elastic Cloud, you can do this by {cloud}/ec-add-user-settings.html#ec-add-us ==== Proxy the request through a server that supports CORS If you are unable to enable CORS on {es}, you can proxy the request through a server that supports CORS. -This is more complicated, but is a viable option. \ No newline at end of file +This is more complicated, but is a viable option. diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event-reference.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event-reference.asciidoc index 42dbd2313c920..d5aa69f0f3fff 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event-reference.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event-reference.asciidoc @@ -4,6 +4,8 @@ Events reference ++++ +deprecated:[9.0.0] + Behavioral Analytics logs events using the {ecs-ref}/ecs-reference.html[Elastic Common Schema^], including a custom field set for analytics events. Refer to <> of the full data objects that are logged. diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event.asciidoc index 3511f761456cf..9aff3b89cd501 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-event.asciidoc @@ -4,6 +4,8 @@ View events ++++ +deprecated:[9.0.0] + [TIP] ==== Refer to <> for a complete list of the fields logged by events. 
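Each collection stores its events in a backing data stream whose name is derived from the collection name; the analytics collection responses in the YAML tests later in this patch show the `behavioral_analytics-events-<collection>` convention. Viewing raw events then amounts to searching that data stream. A small illustrative helper, not part of the upstream change:

[source,java]
----
public final class AnalyticsEventDataStreams {
    private AnalyticsEventDataStreams() {}

    // Derives the backing data stream for a collection, e.g.
    // "behavioral_analytics-events-my_analytics_collection", which can then be
    // searched directly: GET behavioral_analytics-events-my_analytics_collection/_search
    public static String eventDataStream(String collectionName) {
        return "behavioral_analytics-events-" + collectionName;
    }
}
----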
diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-overview.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-overview.asciidoc index 95306e442b13e..47d6f1dd80fb8 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-overview.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-overview.asciidoc @@ -1,6 +1,9 @@ [[behavioral-analytics-overview]] == Search analytics +deprecated:[9.0.0] + + Behavioral Analytics is an analytics event collection platform. Use these tools to analyze your users' searching and clicking behavior. Leverage this information to improve the relevance of your search results and identify gaps in your content. diff --git a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc index f29a6f3a37fdf..a32837788416c 100644 --- a/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc +++ b/docs/reference/search/search-your-data/behavioral-analytics/behavioral-analytics-start.asciidoc @@ -4,6 +4,8 @@ Get started ++++ +deprecated:[9.0.0] + You can manage your analytics in the {kib} UI. Go to *Search > Behavioral Analytics* to get started. diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json index 7f19b6debce8b..77a99c4c5d836 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.delete_behavioral_analytics.json @@ -6,6 +6,10 @@ }, "stability": "experimental", "visibility": "public", + "deprecated": { + "version": "9.0.0", + "description": "Behavioral Analytics has been deprecated and will be removed in a future release." + }, "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json index 175f5e8202b11..c4d2edcf42fc2 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.get_behavioral_analytics.json @@ -6,6 +6,10 @@ }, "stability": "experimental", "visibility": "public", + "deprecated": { + "version": "9.0.0", + "description": "Behavioral Analytics has been deprecated and will be removed in a future release." 
+ }, "headers": { "accept": [ "application/json" @@ -24,10 +28,10 @@ "methods": [ "GET" ], - "parts":{ - "name":{ - "type":"list", - "description":"A comma-separated list of analytics collections to limit the returned information" + "parts": { + "name": { + "type": "list", + "description": "A comma-separated list of analytics collections to limit the returned information" } } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json index 1364fa0d9ef30..b497c3b1314bc 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.post_behavioral_analytics_event.json @@ -6,6 +6,10 @@ }, "stability": "experimental", "visibility": "public", + "deprecated": { + "version": "9.0.0", + "description": "Behavioral Analytics has been deprecated and will be removed in a future release." + }, "headers": { "accept": [ "application/json" diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json index 4929e6b6621d5..811791dd586dd 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json +++ b/rest-api-spec/src/main/resources/rest-api-spec/api/search_application.put_behavioral_analytics.json @@ -5,6 +5,10 @@ "description": "Creates a behavioral analytics collection." }, "stability": "experimental", + "deprecated": { + "version": "9.0.0", + "description": "Behavioral Analytics has been deprecated and will be removed in a future release." + }, "visibility": "public", "headers": { "accept": [ diff --git a/x-pack/plugin/ent-search/qa/rest/build.gradle b/x-pack/plugin/ent-search/qa/rest/build.gradle index 5b04a326f142c..68255a5e7d439 100644 --- a/x-pack/plugin/ent-search/qa/rest/build.gradle +++ b/x-pack/plugin/ent-search/qa/rest/build.gradle @@ -37,3 +37,11 @@ testClusters.configureEach { artifacts { restXpackTests(new File(projectDir, "src/yamlRestTest/resources/rest-api-spec/test")) } + + +tasks.named("yamlRestCompatTestTransform").configure( + { task -> + // Behavioral Analytics is deprecated with 9.0.0. + task.addAllowedWarning("Behavioral Analytics is deprecated and will be removed in a future release.") + } +) diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml index 8271b17626600..eca45925b2937 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/20_usage.yml @@ -27,6 +27,8 @@ teardown: --- "xpack usage includes Enterprise Search": + - requires: + test_runner_features: [ allowed_warnings ] - do: xpack.usage: { } @@ -79,6 +81,8 @@ teardown: query: "{{query_string}}" - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.put_behavioral_analytics: name: test-analytics-collection @@ -113,6 +117,8 @@ teardown: } - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: test-analytics-collection diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/10_behavioral_analytics_list.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/10_behavioral_analytics_list.yml index 387348cf4064f..94aea5bbd70fd 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/10_behavioral_analytics_list.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/10_behavioral_analytics_list.yml @@ -1,39 +1,55 @@ setup: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: my-test-analytics-collection - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: my-test-analytics-collection2 --- teardown: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: my-test-analytics-collection - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: my-test-analytics-collection2 --- "Get Analytics Collection for a particular collection": - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.get_behavioral_analytics: name: my-test-analytics-collection - match: { - "my-test-analytics-collection": { - event_data_stream: { - name: "behavioral_analytics-events-my-test-analytics-collection" - } + "my-test-analytics-collection": { + event_data_stream: { + name: "behavioral_analytics-events-my-test-analytics-collection" } } + } --- "Get Analytics Collection list": - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.get_behavioral_analytics: name: @@ -56,6 +72,8 @@ teardown: "Get Analytics Collection - Resource does not exist": - do: catch: "missing" + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.get_behavioral_analytics: name: test-nonexistent-analytics-collection diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/20_behavioral_analytics_put.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/20_behavioral_analytics_put.yml index 7252de0b3b792..427a46d69ad9c 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/20_behavioral_analytics_put.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/20_behavioral_analytics_put.yml @@ -1,11 +1,19 @@ teardown: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: test-analytics-collection --- "Create Analytics Collection": + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: test-analytics-collection @@ -14,7 +22,11 @@ teardown: --- "Create Analytics Collection - analytics collection already exists": + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: test-analytics-collection @@ -22,6 +34,8 @@ teardown: - do: catch: bad_request + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: test-analytics-collection diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/30_behavioral_analytics_delete.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/30_behavioral_analytics_delete.yml index d6b158cd4a52c..5b055bd952fed 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/30_behavioral_analytics_delete.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/30_behavioral_analytics_delete.yml @@ -1,18 +1,30 @@ setup: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: my-test-analytics-collection --- teardown: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: my-test-analytics-collection ignore: 404 --- "Delete Analytics Collection": + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: my-test-analytics-collection @@ -20,13 +32,19 @@ teardown: - do: catch: "missing" + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.get_behavioral_analytics: name: my-test-analytics-collection --- "Delete Analytics Collection - Analytics Collection does not exist": + - requires: + test_runner_features: [ allowed_warnings ] - do: catch: "missing" + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: test-nonexistent-analytics-collection diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/40_behavioral_analytics_event_post.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/40_behavioral_analytics_event_post.yml index f26bc24805164..396fc4e06b988 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/40_behavioral_analytics_event_post.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/analytics/40_behavioral_analytics_event_post.yml @@ -1,11 +1,19 @@ setup: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: my-test-analytics-collection --- teardown: + - requires: + test_runner_features: [ allowed_warnings ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.delete_behavioral_analytics: name: my-test-analytics-collection @@ -13,30 +21,34 @@ teardown: # Page view event tests ######################################### --- "Post page_view analytics event": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" body: - session: - id: "123" - user: - id: "456" - page: - url: "https://www.elastic.co" + session: + id: "123" + user: + id: "456" + page: + url: "https://www.elastic.co" --- "Post page_view analytics event - Missing page.url": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -49,11 +61,13 @@ teardown: --- "Post page_view analytics event - With document": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -70,11 +84,13 @@ teardown: --- "Post page_view analytics event - With page title": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -89,11 +105,13 @@ teardown: --- "Post page_view analytics event - With referrer": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -108,14 +126,16 @@ teardown: --- "Post page_view analytics event - debug and session information": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: X-Forwarded-For: 192.23.12.12 User-Agent: Mozilla/5.0 Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -152,11 +172,13 @@ teardown: # Search event tests ############################################ --- "Post search analytics event": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -170,12 +192,14 @@ teardown: --- "Post search analytics event – Missing search query": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -188,11 +212,13 @@ teardown: --- "Post search analytics event - With sort order": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -208,11 +234,13 @@ teardown: --- "Post search analytics event - With sort name and direction": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -229,11 +257,13 @@ teardown: --- "Post search analytics event - With pagination": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -250,11 +280,13 @@ teardown: --- "Post search analytics event - With search application": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -269,10 +301,12 @@ teardown: --- "Post search analytics event - With search results": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection @@ -287,7 +321,7 @@ teardown: results: total_results: 150 items: - - document : + - document: id: doc-1 - document: id: doc-2 @@ -302,11 +336,13 @@ teardown: --- "Post search analytics event - With filters": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -324,14 +360,16 @@ teardown: --- "Post search analytics event - debug and session information": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: X-Forwarded-For: 192.23.12.12 User-Agent: Mozilla/5.0 Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search" @@ -393,11 +431,13 @@ teardown: # Search click event tests ####################################### --- "Post search_click analytics event": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -415,14 +455,16 @@ teardown: --- "Post search_click analytics event - debug and session information": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: X-Forwarded-For: 192.23.12.12 User-Agent: Mozilla/5.0 Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -455,11 +497,13 @@ teardown: --- "Post search_click analytics event - Page Only": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -475,11 +519,13 @@ teardown: --- "Post search_click analytics event - Document Only": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -496,12 +542,14 @@ teardown: --- "Post search_click analytics event – Missing search query": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -516,12 +564,14 @@ teardown: --- "Post search_click analytics event – Missing page url and document": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "search_click" @@ -537,12 +587,14 @@ teardown: # Generic errors tests ############################################### --- "Post analytics event - Analytics collection does not exist": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "missing" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: test-nonexistent-analytics-collection event_type: "page_view" @@ -556,12 +608,14 @@ teardown: --- "Post analytics event - Event type does not exist": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "nonexistent-event-type" @@ -577,12 +631,14 @@ teardown: --- "Post page_view analytics event - Missing session.id": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -595,12 +651,14 @@ teardown: --- "Post page_view analytics event - Missing user.id": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "page_view" @@ -613,12 +671,14 @@ teardown: --- "Post analytics event - Unknown event field": - - skip: - features: headers + - requires: + test_runner_features: [ allowed_warnings, headers ] - do: catch: "bad_request" headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.post_behavioral_analytics_event: collection_name: my-test-analytics-collection event_type: "nonexistent-event-type" diff --git a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/search/56_search_application_search_with_apikey.yml b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/search/56_search_application_search_with_apikey.yml index b11b5fb4f3b6b..ae91fefa8cf82 100644 --- a/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/search/56_search_application_search_with_apikey.yml +++ b/x-pack/plugin/ent-search/qa/rest/src/yamlRestTest/resources/rest-api-spec/test/entsearch/search/56_search_application_search_with_apikey.yml @@ -1,4 +1,6 @@ setup: + - requires: + test_runner_features: [ allowed_warnings ] - do: indices.create: index: test-search-index1 @@ -27,6 +29,8 @@ setup: number_of_replicas: 0 - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put: name: test-search-application body: @@ -51,6 +55,8 @@ setup: type: string - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put: name: test-search-application-1 body: @@ -110,10 +116,14 @@ setup: refresh: true - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." search_application.put_behavioral_analytics: name: my-test-analytics-collection --- teardown: + - requires: + test_runner_features: [ allowed_warnings ] - do: search_application.delete: name: test-search-application @@ -145,18 +155,20 @@ teardown: ignore: 404 - do: + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.delete_behavioral_analytics: name: my-test-analytics-collection --- "Query Search Application with API key": - - skip: - features: headers + - requires: + test_runner_features: [ headers, allowed_warnings ] - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user security.create_api_key: - body: > + body: > { "name": "search-application-api-key", "role_descriptors": { @@ -176,10 +188,10 @@ teardown: } - match: { name: "search-application-api-key" } - - set: { encoded: api_key_encoded } + - set: { encoded: api_key_encoded } - set: { id: api_key_id } -# Query Search Application with default parameters: + # Query Search Application with default parameters: - do: headers: Authorization: ApiKey ${api_key_encoded} @@ -191,7 +203,7 @@ teardown: - match: { hits.hits.1._id: "doc2" } -# Query Search Application overriding part of the parameters: + # Query Search Application overriding part of the parameters: - do: headers: Authorization: ApiKey ${api_key_encoded} @@ -205,7 +217,7 @@ teardown: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc1" } -# Query Search Application overriding all parameters: + # Query Search Application overriding all parameters: - do: headers: Authorization: ApiKey ${api_key_encoded} @@ -220,7 +232,7 @@ teardown: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc2" } -# Query Search Application with list of parameters: + # Query Search Application with list of parameters: - do: headers: Authorization: ApiKey ${api_key_encoded} @@ -241,7 +253,7 @@ teardown: - match: { hits.total.value: 1 } - match: { hits.hits.0._id: "doc2" } -# Query Search Application with invalid parameter validation: + # Query Search Application with invalid parameter validation: - do: catch: "bad_request" headers: @@ -253,7 +265,7 @@ teardown: field_name: field3 field_value: 35 -# Query Search Application without required parameter: + # Query Search Application without required parameter: - do: catch: "bad_request" headers: @@ -264,7 +276,7 @@ teardown: params: field_value: test -# Query Search Application - not found: + # Query Search Application - not found: - do: catch: forbidden headers: @@ -276,11 +288,13 @@ teardown: field_name: field3 field_value: value3 -# Get Analytics Collection should be rejected due to a workflow restriction + # Get Analytics Collection should be rejected due to a workflow restriction - do: catch: forbidden headers: Authorization: ApiKey ${api_key_encoded} + allowed_warnings: + - "Behavioral Analytics is deprecated and will be removed in a future release." 
search_application.get_behavioral_analytics: name: - match: { status: 403 } @@ -288,7 +302,7 @@ teardown: - match: { error.root_cause.0.type: role_restriction_exception } - match: { error.root_cause.0.reason: "access restricted by workflow" } -# Get API key should not be allowed + # Get API key should not be allowed - do: catch: forbidden headers: @@ -300,18 +314,18 @@ teardown: - match: { error.root_cause.0.type: role_restriction_exception } - match: { error.root_cause.0.reason: "access restricted by workflow" } -# Authenticate with API key should not be allowed + # Authenticate with API key should not be allowed - do: catch: forbidden headers: Authorization: ApiKey ${api_key_encoded} - security.authenticate: {} + security.authenticate: { } - match: { status: 403 } - match: { error.type: security_exception } - match: { error.root_cause.0.type: role_restriction_exception } - match: { error.root_cause.0.reason: "access restricted by workflow" } -# Direct index search should be rejected due to a workflow restriction + # Direct index search should be rejected due to a workflow restriction - do: catch: forbidden headers: @@ -327,11 +341,11 @@ teardown: - match: { error.root_cause.0.type: role_restriction_exception } - match: { error.root_cause.0.reason: "access restricted by workflow" } -# Creating an API key which can only search 'test-search-application-1' + # Creating an API key which can only search 'test-search-application-1' - do: headers: { Authorization: "Basic ZW50c2VhcmNoLXVzZXI6ZW50c2VhcmNoLXVzZXItcGFzc3dvcmQ=" } # user security.create_api_key: - body: > + body: > { "name": "search-application-1-api-key", "role_descriptors": { @@ -350,10 +364,10 @@ teardown: } - match: { name: "search-application-1-api-key" } - - set: { encoded: api_key_encoded_1 } + - set: { encoded: api_key_encoded_1 } - set: { id: api_key_id_1 } -# Query Search Application 'test-search-application' should be denied since API key (api_key_encoded_1) does not have required index privilege + # Query Search Application 'test-search-application' should be denied since API key (api_key_encoded_1) does not have required index privilege - do: catch: forbidden headers: @@ -364,7 +378,7 @@ teardown: - match: { error.type: security_exception } - match: { error.reason: "action [indices:data/read/xpack/application/search_application/search] is unauthorized for API key id [${api_key_id_1}] of user [entsearch-user] on indices [test-search-application], this action is granted by the index privileges [read,all]" } -# Query Search Application 'test-search-application-1' with new API key (api_key_encoded_1) should be allowed: + # Query Search Application 'test-search-application-1' with new API key (api_key_encoded_1) should be allowed: - do: headers: Authorization: ApiKey ${api_key_encoded_1} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java index df1c76ccf770f..db6ee3a621d84 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/EnterpriseSearch.java @@ -233,6 +233,10 @@ public class EnterpriseSearch extends Plugin implements ActionPlugin, SystemInde private final boolean enabled; + // NOTE: Behavioral Analytics is deprecated in 9.0 but not 8.x. 
+ public static final String BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE = + "Behavioral Analytics is deprecated and will be removed in a future release."; + public EnterpriseSearch(Settings settings) { this.enabled = XPackSettings.ENTERPRISE_SEARCH_ENABLED.get(settings); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java index 96ea6dfaec535..b0c000a3ffa6d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollection.java @@ -28,7 +28,9 @@ /** * The {@link AnalyticsCollection} model. + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsCollection implements Writeable, ToXContentObject { private static final ObjectParser PARSER = ObjectParser.fromBuilder( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java index 405c29b00586d..d6df46c1882b9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionResolver.java @@ -28,7 +28,9 @@ /** * A service that allows the resolution of {@link AnalyticsCollection} by name. + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsCollectionResolver { private final IndexNameExpressionResolver indexNameExpressionResolver; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java index fc43783cfd8ad..322b3bdad0366 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsCollectionService.java @@ -33,7 +33,9 @@ * Until we have more specific need the {@link AnalyticsCollection} is just another representation * of a {@link org.elasticsearch.cluster.metadata.DataStream}. * As a consequence, this service is mostly a facade for the data stream API. 
+ * @deprecated in 9.0 */ +@Deprecated public class AnalyticsCollectionService { private static final Logger logger = LogManager.getLogger(AnalyticsCollectionService.class); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java index 9b45a13d18d01..59c6e445365aa 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsConstants.java @@ -7,6 +7,10 @@ package org.elasticsearch.xpack.application.analytics; +/** + * @deprecated in 9.0 + */ +@Deprecated public class AnalyticsConstants { private AnalyticsConstants() {} diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java index 2166646fc4bfd..8df0a1a7f3bc3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsEventIngestService.java @@ -17,7 +17,9 @@ /** * Event emitter will index Analytics events submitted through a @{PostAnalyticsEventAction.Request} request. + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsEventIngestService { private final AnalyticsCollectionResolver collectionResolver; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java index 99a239dd617a2..f772363939278 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/AnalyticsTemplateRegistry.java @@ -31,6 +31,10 @@ import static org.elasticsearch.xpack.application.analytics.AnalyticsConstants.TEMPLATE_VERSION_VARIABLE; import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; +/** + * @deprecated in 9.0 + */ +@Deprecated public class AnalyticsTemplateRegistry extends IndexTemplateRegistry { // This number must be incremented when we make changes to built-in templates. 
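The remaining Java changes repeat one convention: each analytics class gets an `@deprecated in 9.0` Javadoc note plus the `@Deprecated` annotation, and the transport actions below additionally log a deprecation warning when handling a request, which is what surfaces the warning header to clients. A condensed sketch of that convention, using a hypothetical class name and deprecation key rather than the real ones:

[source,java]
----
import org.elasticsearch.common.logging.DeprecationCategory;
import org.elasticsearch.common.logging.DeprecationLogger;

/**
 * Hypothetical example of the deprecation convention applied in this patch.
 * @deprecated in 9.0
 */
@Deprecated
public class ExampleDeprecatedAnalyticsHandler {

    private static final DeprecationLogger DEPRECATION_LOGGER =
        DeprecationLogger.getLogger(ExampleDeprecatedAnalyticsHandler.class);

    void handleRequest() {
        // The key ("behavioral_analytics_example" is illustrative) identifies this deprecation
        // for de-duplication; the message is logged and returned to callers as a warning header.
        DEPRECATION_LOGGER.warn(
            DeprecationCategory.API,
            "behavioral_analytics_example",
            "Behavioral Analytics is deprecated and will be removed in a future release."
        );
        // ... actual request handling would go here ...
    }
}
----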
diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java index f6fd758882930..aace5620bd64f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/DeleteAnalyticsCollectionAction.java @@ -24,6 +24,10 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; +/** + * @deprecated in 9.0 + */ +@Deprecated public class DeleteAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/delete"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java index 833fc64f03f85..8e68a99b74184 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/GetAnalyticsCollectionAction.java @@ -25,6 +25,10 @@ import java.util.List; import java.util.Objects; +/** + * @deprecated in 9.0 + */ +@Deprecated public class GetAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/get"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java index 7d93c33208e5c..21056239648f1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PostAnalyticsEventAction.java @@ -36,6 +36,10 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; import static org.elasticsearch.xcontent.ConstructingObjectParser.constructorArg; +/** + * @deprecated in 9.0 + */ +@Deprecated public class PostAnalyticsEventAction { public static final String NAME = "cluster:admin/xpack/application/analytics/post_event"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java index 3017111468903..db77c9cca4172 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/PutAnalyticsCollectionAction.java @@ -23,6 +23,10 @@ import static org.elasticsearch.action.ValidateActions.addValidationError; +/** + * @deprecated in 9.0 + */ +@Deprecated public class PutAnalyticsCollectionAction { public static final String NAME = "cluster:admin/xpack/application/analytics/put"; diff --git 
a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java index 9165b4ee05d87..439905aac8480 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestDeleteAnalyticsCollectionAction.java @@ -23,6 +23,10 @@ import static org.elasticsearch.rest.RestRequest.Method.DELETE; +/** + * @deprecated in 9.0 + */ +@Deprecated @ServerlessScope(Scope.PUBLIC) public class RestDeleteAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public RestDeleteAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java index 07b51d3a29ecd..1413a9b6c0a3f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestGetAnalyticsCollectionAction.java @@ -23,6 +23,10 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; +/** + * @deprecated in 9.0 + */ +@Deprecated @ServerlessScope(Scope.PUBLIC) public class RestGetAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public RestGetAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java index 5706e5e384053..bdfd1afb321a1 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPostAnalyticsEventAction.java @@ -29,6 +29,10 @@ import static org.elasticsearch.rest.RestRequest.Method.POST; +/** + * @deprecated in 9.0 + */ +@Deprecated @ServerlessScope(Scope.PUBLIC) public class RestPostAnalyticsEventAction extends EnterpriseSearchBaseRestHandler { public RestPostAnalyticsEventAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java index 4c7f50af6f30d..05358cdadb002 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/RestPutAnalyticsCollectionAction.java @@ -24,6 +24,10 @@ import static org.elasticsearch.rest.RestRequest.Method.PUT; +/** + * @deprecated in 9.0 + */ +@Deprecated @ServerlessScope(Scope.PUBLIC) public class RestPutAnalyticsCollectionAction extends EnterpriseSearchBaseRestHandler { public 
RestPutAnalyticsCollectionAction(XPackLicenseState licenseState) { diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java index 78fc1e171df13..88dc3ff2889c0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportDeleteAnalyticsCollectionAction.java @@ -15,6 +15,8 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -22,6 +24,13 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE; + +/** + * @deprecated in 9.0 + */ +@Deprecated public class TransportDeleteAnalyticsCollectionAction extends AcknowledgedTransportMasterNodeAction< DeleteAnalyticsCollectionAction.Request> { @@ -59,6 +68,8 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { + DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class) + .warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE); analyticsCollectionService.deleteAnalyticsCollection(state, request, listener.map(v -> AcknowledgedResponse.TRUE)); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java index 6d6a19aeb1468..62a038de76ae6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportGetAnalyticsCollectionAction.java @@ -13,6 +13,8 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -20,6 +22,13 @@ import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE; + +/** + 
* @deprecated in 9.0 + */ +@Deprecated public class TransportGetAnalyticsCollectionAction extends TransportMasterNodeReadAction< GetAnalyticsCollectionAction.Request, GetAnalyticsCollectionAction.Response> { @@ -54,6 +63,8 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { + DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class) + .warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE); analyticsCollectionService.getAnalyticsCollection(state, request, listener); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java index bf81ba75400b1..a260ff25cf5fc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPostAnalyticsEventAction.java @@ -10,17 +10,25 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import org.elasticsearch.action.support.HandledTransportAction; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.application.analytics.AnalyticsEventIngestService; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE; + /** * Transport implementation for the {@link PostAnalyticsEventAction}. * It executes the {@link AnalyticsEventIngestService#addEvent} method if the XPack license is valid, else it calls * the listener's onFailure method with the appropriate exception. 
+ * @deprecated in 9.0 */ +@Deprecated public class TransportPostAnalyticsEventAction extends HandledTransportAction< PostAnalyticsEventAction.Request, PostAnalyticsEventAction.Response> { @@ -31,7 +39,8 @@ public class TransportPostAnalyticsEventAction extends HandledTransportAction< public TransportPostAnalyticsEventAction( TransportService transportService, ActionFilters actionFilters, - AnalyticsEventIngestService eventEmitterService + AnalyticsEventIngestService eventEmitterService, + ClusterService clusterService ) { super( PostAnalyticsEventAction.NAME, @@ -49,6 +58,8 @@ protected void doExecute( PostAnalyticsEventAction.Request request, ActionListener listener ) { + DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class) + .warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE); this.eventEmitterService.addEvent(request, listener); } } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java index d34d723c0e76e..b02bf3edbad2c 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/action/TransportPutAnalyticsCollectionAction.java @@ -14,13 +14,23 @@ import org.elasticsearch.cluster.block.ClusterBlockException; import org.elasticsearch.cluster.block.ClusterBlockLevel; import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.logging.DeprecationCategory; +import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.util.concurrent.EsExecutors; +import org.elasticsearch.features.FeatureService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.application.analytics.AnalyticsCollectionService; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_API_ENDPOINT; +import static org.elasticsearch.xpack.application.EnterpriseSearch.BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE; + +/** + * @deprecated in 9.0 + */ +@Deprecated public class TransportPutAnalyticsCollectionAction extends TransportMasterNodeAction< PutAnalyticsCollectionAction.Request, PutAnalyticsCollectionAction.Response> { @@ -33,7 +43,8 @@ public TransportPutAnalyticsCollectionAction( ClusterService clusterService, ThreadPool threadPool, ActionFilters actionFilters, - AnalyticsCollectionService analyticsCollectionService + AnalyticsCollectionService analyticsCollectionService, + FeatureService featureService ) { super( PutAnalyticsCollectionAction.NAME, @@ -60,6 +71,8 @@ protected void masterOperation( ClusterState state, ActionListener listener ) { + DeprecationLogger.getLogger(TransportDeleteAnalyticsCollectionAction.class) + .warn(DeprecationCategory.API, BEHAVIORAL_ANALYTICS_API_ENDPOINT, BEHAVIORAL_ANALYTICS_DEPRECATION_MESSAGE); analyticsCollectionService.putAnalyticsCollection(state, request, listener); } diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java index 3bf8c372cd304..cdf3415d43bd3 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEvent.java @@ -31,7 +31,9 @@ /** * This class represents Analytics events object meant to be emitted to the event queue. + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsEvent implements Writeable, ToXContentObject { public static final ParseField TIMESTAMP_FIELD = new ParseField("@timestamp"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java index aa10b143f69e7..6cf1c0c094bcc 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/AnalyticsEventFactory.java @@ -29,7 +29,9 @@ /** * A utility class for parsing {@link AnalyticsEvent} objects from payloads (such as HTTP POST request bodies) or input streams. + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsEventFactory { public static final AnalyticsEventFactory INSTANCE = new AnalyticsEventFactory(); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java index 81a4bf5ddfda1..ae0191cae0194 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/PageViewAnalyticsEvent.java @@ -22,6 +22,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD; import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD; +/** + * @deprecated in 9.0 + */ +@Deprecated public class PageViewAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( "page_view_event", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java index 86453a33a342a..16ae078e079b4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchAnalyticsEvent.java @@ -20,6 +20,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD; import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SearchAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( 
"search_event", diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java index aeffc818960e9..f5ae40b1d06b0 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/event/SearchClickAnalyticsEvent.java @@ -24,6 +24,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.SessionAnalyticsEventField.SESSION_FIELD; import static org.elasticsearch.xpack.application.analytics.event.parser.field.UserAnalyticsEventField.USER_FIELD; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SearchClickAnalyticsEvent { private static final ObjectParser PARSER = ObjectParser.fromBuilder( diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java index 1cab0d1911975..af55af7b480f8 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/DocumentAnalyticsEventField.java @@ -18,6 +18,10 @@ import static org.elasticsearch.common.Strings.requireNonBlank; +/** + * @deprecated in 9.0 + */ +@Deprecated public class DocumentAnalyticsEventField { public static final ParseField DOCUMENT_FIELD = new ParseField("document"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java index b2436be319190..07f4a6fd0556e 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PageAnalyticsEventField.java @@ -16,6 +16,10 @@ import java.util.HashMap; import java.util.Map; +/** + * @deprecated in 9.0 + */ +@Deprecated public class PageAnalyticsEventField { public static final ParseField PAGE_FIELD = new ParseField("page"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java index 3dca22d94ad7b..68b9d880a3a83 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/PaginationAnalyticsEventField.java @@ -17,6 +17,10 @@ import java.util.HashMap; import java.util.Map; +/** + * @deprecated in 9.0 + */ +@Deprecated public class PaginationAnalyticsEventField { public static final ParseField PAGINATION_FIELD = 
new ParseField("page"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java index 438fb20dcf0bf..1595dc398e6f6 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchAnalyticsEventField.java @@ -20,6 +20,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.SearchFiltersAnalyticsEventField.SEARCH_FILTERS_FIELD; import static org.elasticsearch.xpack.application.analytics.event.parser.field.SortOrderAnalyticsEventField.SORT_FIELD; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SearchAnalyticsEventField { public static final ParseField SEARCH_FIELD = new ParseField("search"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java index 261a6c623a7a5..b3023920eb2be 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchFiltersAnalyticsEventField.java @@ -18,6 +18,10 @@ import java.util.List; import java.util.Map; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SearchFiltersAnalyticsEventField { public static final ParseField SEARCH_FILTERS_FIELD = new ParseField("filters"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java index e95e777bb88ec..602f8b8f5a95f 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SearchResultAnalyticsEventField.java @@ -19,6 +19,10 @@ import static org.elasticsearch.xpack.application.analytics.event.parser.field.DocumentAnalyticsEventField.DOCUMENT_FIELD; import static org.elasticsearch.xpack.application.analytics.event.parser.field.PageAnalyticsEventField.PAGE_FIELD; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SearchResultAnalyticsEventField { public static final ParseField SEARCH_RESULTS_TOTAL_FIELD = new ParseField("total_results"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java index 866984c62b75e..96c4fcf83c85d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java +++ 
b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SessionAnalyticsEventField.java @@ -20,6 +20,10 @@ import static org.elasticsearch.common.Strings.requireNonBlank; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SessionAnalyticsEventField { public static final ParseField SESSION_FIELD = new ParseField("session"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java index df591572a7672..5ff7ebd843fc4 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/SortOrderAnalyticsEventField.java @@ -18,6 +18,10 @@ import static org.elasticsearch.common.Strings.requireNonBlank; +/** + * @deprecated in 9.0 + */ +@Deprecated public class SortOrderAnalyticsEventField { public static final ParseField SORT_FIELD = new ParseField("sort"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java index 186f581d4b6ff..76ff15d900bf9 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/event/parser/field/UserAnalyticsEventField.java @@ -19,6 +19,10 @@ import static org.elasticsearch.common.Strings.requireNonBlank; +/** + * @deprecated in 9.0 + */ +@Deprecated public class UserAnalyticsEventField { public static final ParseField USER_FIELD = new ParseField("user"); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java index a49e29654b0d6..cb2df7f16db69 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventEmitter.java @@ -34,6 +34,10 @@ import static org.elasticsearch.xpack.core.ClientHelper.ENT_SEARCH_ORIGIN; +/** + * @deprecated in 9.0 + */ +@Deprecated public class AnalyticsEventEmitter extends AbstractLifecycleComponent { private static final Logger logger = LogManager.getLogger(AnalyticsEventEmitter.class); diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java index ac78a981c2d0c..fbf34e5e3fa4d 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/AnalyticsEventIngestConfig.java @@ -20,7 +20,9 @@ * - flush_delay: the maximum delay between 
two flushes (default: 10s.) * - max_events_per_bulk: the maximum number of events that can be added to the bulk before flushing the bulk (default: 1000) * - max_number_of_retries: the maximum number of retries when bulk execution fails (default: 3) + * @deprecated in 9.0 */ +@Deprecated public class AnalyticsEventIngestConfig { private static final String SETTING_ROOT_PATH = "xpack.applications.behavioral_analytics.ingest"; diff --git a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java index 343d050ffbfde..595d4147e9f1b 100644 --- a/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java +++ b/x-pack/plugin/ent-search/src/main/java/org/elasticsearch/xpack/application/analytics/ingest/BulkProcessorFactory.java @@ -26,7 +26,9 @@ /** * Event ingest is done through a {@link BulkProcessor2}. This class is responsible for instantiating the bulk processor. + * @deprecated in 9.0 */ +@Deprecated public class BulkProcessorFactory { private static final Logger logger = LogManager.getLogger(AnalyticsEventEmitter.class); From 28f4d87ba51922f46d154d5ff57d82cd02251a62 Mon Sep 17 00:00:00 2001 From: Jill Guyonnet Date: Tue, 25 Feb 2025 16:38:47 +0100 Subject: [PATCH 16/20] [Fleet] Add `upgrade_attempts` to `.fleet-agents` index (#123256) * [Fleet] Add upgrade_attemps to .fleet-agents index * Change type to date --------- Co-authored-by: Elastic Machine --- .../template-resources/src/main/resources/fleet-agents.json | 3 +++ 1 file changed, 3 insertions(+) diff --git a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json index 8b1c13f3152e8..fb7f96cd223d4 100644 --- a/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json +++ b/x-pack/plugin/core/template-resources/src/main/resources/fleet-agents.json @@ -310,6 +310,9 @@ "upgrade_status": { "type": "keyword" }, + "upgrade_attempts": { + "type": "date" + }, "user_provided_metadata": { "type": "object", "enabled": false From c02b484eab3f865b0b3862926d3855506a96b03d Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 25 Feb 2025 07:59:15 -0800 Subject: [PATCH 17/20] Use mock block factory in breaker tests (#122965) We have two leaks reported in breaker tests, but we do not have their traces. This PR integrates MockBlockFactory for breaker tests to help identify these potential leaks. 
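To make the failure mode concrete, here is a minimal, self-contained sketch of the underlying technique, not the MockBlockFactory API itself (TrackingFactory and ensureAllReleased are illustrative names): the factory records a stack trace per allocation and, after the test, fails with those traces attached if anything is still live.

    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    import java.util.concurrent.atomic.AtomicLong;

    // Illustrative only: a factory that remembers where each resource was created,
    // so an end-of-test check can report leaks together with their allocation traces.
    final class TrackingFactory {
        private final Map<Long, Exception> live = new ConcurrentHashMap<>();
        private final AtomicLong ids = new AtomicLong();

        long create() {
            long id = ids.incrementAndGet();
            live.put(id, new Exception("allocated here")); // captures the creation stack trace
            return id;
        }

        void release(long id) {
            live.remove(id);
        }

        // Called after each test: fails with the creation traces of anything not released.
        void ensureAllReleased() {
            if (live.isEmpty() == false) {
                AssertionError error = new AssertionError(live.size() + " resource(s) not released");
                live.values().forEach(error::addSuppressed);
                throw error;
            }
        }
    }

The actual assertion added in AbstractEsqlIntegTestCase below simply delegates to MockBlockFactory.ensureAllBlocksAreReleased() inside an assertBusy loop.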
Relates #122810 --- muted-tests.yml | 2 -- .../compute/data/BlockFactoryProvider.java | 12 ++++++++++++ .../esql/action/AbstractEsqlIntegTestCase.java | 18 ++++++++++++++++++ .../xpack/esql/action/EsqlActionBreakerIT.java | 18 ++++++++++++++++++ .../xpack/esql/plugin/EsqlPlugin.java | 16 +++++++++++++--- .../TransportEsqlAsyncGetResultsAction.java | 5 +++-- .../plugin/TransportEsqlAsyncStopAction.java | 5 +++-- .../esql/plugin/TransportEsqlQueryAction.java | 10 +++++----- 8 files changed, 72 insertions(+), 14 deletions(-) create mode 100644 x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryProvider.java diff --git a/muted-tests.yml b/muted-tests.yml index c5a962741eb0f..759a13acfa541 100644 --- a/muted-tests.yml +++ b/muted-tests.yml @@ -295,8 +295,6 @@ tests: - class: org.elasticsearch.search.basic.SearchWithRandomDisconnectsIT method: testSearchWithRandomDisconnects issue: https://github.com/elastic/elasticsearch/issues/122707 -- class: org.elasticsearch.xpack.esql.action.EsqlActionBreakerIT - issue: https://github.com/elastic/elasticsearch/issues/122810 - class: org.elasticsearch.snapshots.DedicatedClusterSnapshotRestoreIT method: testRestoreShrinkIndex issue: https://github.com/elastic/elasticsearch/issues/121717 diff --git a/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryProvider.java b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryProvider.java new file mode 100644 index 0000000000000..4c851cc226d35 --- /dev/null +++ b/x-pack/plugin/esql/compute/src/main/java/org/elasticsearch/compute/data/BlockFactoryProvider.java @@ -0,0 +1,12 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License + * 2.0; you may not use this file except in compliance with the Elastic License + * 2.0. 
+ */ + +package org.elasticsearch.compute.data; + +public record BlockFactoryProvider(BlockFactory blockFactory) { + +} diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java index 90bf34b499390..971a2a7705c18 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/AbstractEsqlIntegTestCase.java @@ -17,7 +17,9 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.CollectionUtils; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.core.TimeValue; import org.elasticsearch.core.Tuple; import org.elasticsearch.health.node.selection.HealthNode; @@ -60,6 +62,22 @@ public void ensureExchangesAreReleased() throws Exception { } public void ensureBlocksReleased() { + for (String node : internalCluster().getNodeNames()) { + BlockFactoryProvider blockFactoryProvider = internalCluster().getInstance(BlockFactoryProvider.class, node); + try { + if (blockFactoryProvider.blockFactory() instanceof MockBlockFactory mockBlockFactory) { + assertBusy(() -> { + try { + mockBlockFactory.ensureAllBlocksAreReleased(); + } catch (Exception e) { + throw new AssertionError(e); + } + }); + } + } catch (Exception e) { + throw new RuntimeException("failed to check mock factory", e); + } + } for (String node : internalCluster().getNodeNames()) { CircuitBreakerService breakerService = internalCluster().getInstance(CircuitBreakerService.class, node); CircuitBreaker reqBreaker = breakerService.getBreaker(CircuitBreaker.REQUEST); diff --git a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java index 57f6b55d31845..ef4a2c1e00369 100644 --- a/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java +++ b/x-pack/plugin/esql/src/internalClusterTest/java/org/elasticsearch/xpack/esql/action/EsqlActionBreakerIT.java @@ -12,18 +12,23 @@ import org.elasticsearch.action.DocWriteResponse; import org.elasticsearch.cluster.metadata.IndexMetadata; import org.elasticsearch.common.Strings; +import org.elasticsearch.common.breaker.CircuitBreaker; import org.elasticsearch.common.breaker.CircuitBreakingException; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeValue; +import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.compute.operator.exchange.ExchangeService; +import org.elasticsearch.compute.test.MockBlockFactory; import org.elasticsearch.core.TimeValue; import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.junit.annotations.TestLogging; import 
org.elasticsearch.xpack.esql.EsqlTestUtils; +import org.elasticsearch.xpack.esql.plugin.EsqlPlugin; import java.util.ArrayList; import java.util.Collection; @@ -50,6 +55,8 @@ protected Collection> nodePlugins() { List> plugins = new ArrayList<>(super.nodePlugins()); plugins.add(InternalExchangePlugin.class); plugins.add(InternalTransportSettingPlugin.class); + assertTrue(plugins.removeIf(p -> p.isAssignableFrom(EsqlPlugin.class))); + plugins.add(EsqlTestPluginWithMockBlockFactory.class); return plugins; } @@ -79,6 +86,17 @@ protected Settings nodeSettings(int nodeOrdinal, Settings otherSettings) { .build(); } + public static class EsqlTestPluginWithMockBlockFactory extends EsqlPlugin { + @Override + protected BlockFactoryProvider blockFactoryProvider( + CircuitBreaker breaker, + BigArrays bigArrays, + ByteSizeValue maxPrimitiveArraySize + ) { + return new BlockFactoryProvider(new MockBlockFactory(breaker, bigArrays, maxPrimitiveArraySize)); + } + } + private EsqlQueryResponse runWithBreaking(EsqlQueryRequest request) throws CircuitBreakingException { setRequestCircuitBreakerLimit(ByteSizeValue.ofBytes(between(256, 2048))); try { diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java index 4379e2e8041ae..7b7b056741e27 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/EsqlPlugin.java @@ -22,6 +22,7 @@ import org.elasticsearch.common.util.FeatureFlag; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.compute.data.BlockWritables; import org.elasticsearch.compute.lucene.LuceneOperator; import org.elasticsearch.compute.lucene.ValuesSourceReaderOperator; @@ -111,15 +112,24 @@ public Collection createComponents(PluginServices services) { BlockFactory.DEFAULT_MAX_BLOCK_PRIMITIVE_ARRAY_SIZE ); BigArrays bigArrays = services.indicesService().getBigArrays().withCircuitBreaking(); - BlockFactory blockFactory = new BlockFactory(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize); + var blockFactoryProvider = blockFactoryProvider(circuitBreaker, bigArrays, maxPrimitiveArrayBlockSize); setupSharedSecrets(); return List.of( new PlanExecutor(new IndexResolver(services.client()), services.telemetryProvider().getMeterRegistry(), getLicenseState()), - new ExchangeService(services.clusterService().getSettings(), services.threadPool(), ThreadPool.Names.SEARCH, blockFactory), - blockFactory + new ExchangeService( + services.clusterService().getSettings(), + services.threadPool(), + ThreadPool.Names.SEARCH, + blockFactoryProvider.blockFactory() + ), + blockFactoryProvider ); } + protected BlockFactoryProvider blockFactoryProvider(CircuitBreaker breaker, BigArrays bigArrays, ByteSizeValue maxPrimitiveArraySize) { + return new BlockFactoryProvider(new BlockFactory(breaker, bigArrays, maxPrimitiveArraySize)); + } + private void setupSharedSecrets() { try { // EsqlQueryRequestBuilder. 
initializes the shared secret access diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java index 5658db0599186..cc917dfa7a30c 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncGetResultsAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; @@ -44,7 +45,7 @@ public TransportEsqlAsyncGetResultsAction( Client client, ThreadPool threadPool, BigArrays bigArrays, - BlockFactory blockFactory + BlockFactoryProvider blockFactoryProvider ) { super( EsqlAsyncGetResultAction.NAME, @@ -57,7 +58,7 @@ public TransportEsqlAsyncGetResultsAction( bigArrays, EsqlQueryTask.class ); - this.blockFactory = blockFactory; + this.blockFactory = blockFactoryProvider.blockFactory(); } @Override diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java index a1a3072c69b8c..d5670d9876f1e 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlAsyncStopAction.java @@ -18,6 +18,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.util.concurrent.EsExecutors; import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.injection.guice.Inject; import org.elasticsearch.tasks.Task; @@ -64,13 +65,13 @@ public TransportEsqlAsyncStopAction( TransportEsqlAsyncGetResultsAction getResultsAction, Client client, ExchangeService exchangeService, - BlockFactory blockFactory + BlockFactoryProvider blockFactoryProvider ) { super(EsqlAsyncStopAction.NAME, transportService, actionFilters, AsyncStopRequest::new, EsExecutors.DIRECT_EXECUTOR_SERVICE); this.queryAction = queryAction; this.getResultsAction = getResultsAction; this.exchangeService = exchangeService; - this.blockFactory = blockFactory; + this.blockFactory = blockFactoryProvider.blockFactory(); this.transportService = transportService; this.clusterService = clusterService; this.security = new AsyncSearchSecurity( diff --git a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java index baf351c27107c..c0e6704ff65ee 100644 --- a/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java +++ b/x-pack/plugin/esql/src/main/java/org/elasticsearch/xpack/esql/plugin/TransportEsqlQueryAction.java @@ -20,7 +20,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.util.BigArrays; import 
org.elasticsearch.common.util.concurrent.EsExecutors; -import org.elasticsearch.compute.data.BlockFactory; +import org.elasticsearch.compute.data.BlockFactoryProvider; import org.elasticsearch.compute.operator.exchange.ExchangeService; import org.elasticsearch.core.Nullable; import org.elasticsearch.injection.guice.Inject; @@ -92,7 +92,7 @@ public TransportEsqlQueryAction( ClusterService clusterService, ThreadPool threadPool, BigArrays bigArrays, - BlockFactory blockFactory, + BlockFactoryProvider blockFactoryProvider, Client client, NamedWriteableRegistry registry, IndexNameExpressionResolver indexNameExpressionResolver, @@ -114,14 +114,14 @@ public TransportEsqlQueryAction( lookupLookupShardContextFactory, transportService, bigArrays, - blockFactory + blockFactoryProvider.blockFactory() ); this.lookupFromIndexService = new LookupFromIndexService( clusterService, lookupLookupShardContextFactory, transportService, bigArrays, - blockFactory + blockFactoryProvider.blockFactory() ); this.computeService = new ComputeService( searchService, @@ -132,7 +132,7 @@ public TransportEsqlQueryAction( clusterService, threadPool, bigArrays, - blockFactory + blockFactoryProvider.blockFactory() ); this.asyncTaskManagementService = new AsyncTaskManagementService<>( XPackPlugin.ASYNC_RESULTS_INDEX, From 48d20839fb21df68fea18acec7fe49d03216b1c6 Mon Sep 17 00:00:00 2001 From: "elastic-renovate-prod[bot]" <174716857+elastic-renovate-prod[bot]@users.noreply.github.com> Date: Wed, 26 Feb 2025 03:44:53 +1100 Subject: [PATCH 18/20] Update docker.elastic.co/wolfi/chainguard-base:latest Docker digest to c66fdaf (#123385) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This PR contains the following updates: | Package | Update | Change | |---|---|---| | docker.elastic.co/wolfi/chainguard-base | digest | `d74b1fd` -> `c66fdaf` | --- ### Configuration 📅 **Schedule**: Branch creation - "after 1pm on tuesday" (UTC), Automerge - At any time (no schedule defined). 🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied. ♻ **Rebasing**: Whenever PR becomes conflicted, or you tick the rebase/retry checkbox. 🔕 **Ignore**: Close this PR and you won't be reminded about this update again. --- - [ ] If you want to rebase/retry this PR, check this box --- This PR has been generated by [Renovate Bot](https://redirect.github.com/renovatebot/renovate). 
--- .../main/java/org/elasticsearch/gradle/internal/DockerBase.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java index b0129d26a818b..ec0c2521ac40b 100644 --- a/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java +++ b/build-tools-internal/src/main/java/org/elasticsearch/gradle/internal/DockerBase.java @@ -22,7 +22,7 @@ public enum DockerBase { // Chainguard based wolfi image with latest jdk // This is usually updated via renovatebot // spotless:off - WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:d74b1fda6b7fee2c90b410df258e005c049e0672fe16d79d00e58f14fb69f90b", + WOLFI("docker.elastic.co/wolfi/chainguard-base:latest@sha256:c66fdafe581a6ab1668a962015de4ce4666a60ed601d24f019f03bb4aaab8eeb", "-wolfi", "apk" ), From 88cf2487e7deb38686870e40bf08b2b729b7d848 Mon Sep 17 00:00:00 2001 From: Keith Massey Date: Tue, 25 Feb 2025 10:46:22 -0600 Subject: [PATCH 19/20] Fixing serialization of ScriptStats cache_evictions_history (#123384) --- docs/changelog/123384.yaml | 5 +++ .../org/elasticsearch/script/ScriptStats.java | 3 +- .../script/ScriptStatsTests.java | 31 +++++++++++++++++++ 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 docs/changelog/123384.yaml diff --git a/docs/changelog/123384.yaml b/docs/changelog/123384.yaml new file mode 100644 index 0000000000000..33d42b79c41ee --- /dev/null +++ b/docs/changelog/123384.yaml @@ -0,0 +1,5 @@ +pr: 123384 +summary: Fixing serialization of `ScriptStats` `cache_evictions_history` +area: Stats +type: bug +issues: [] diff --git a/server/src/main/java/org/elasticsearch/script/ScriptStats.java b/server/src/main/java/org/elasticsearch/script/ScriptStats.java index a25d9587bcb0e..08ce12232a956 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptStats.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptStats.java @@ -28,6 +28,7 @@ import java.util.Objects; import static org.elasticsearch.common.collect.Iterators.single; +import static org.elasticsearch.script.ScriptContextStats.Fields.CACHE_EVICTIONS_HISTORY; import static org.elasticsearch.script.ScriptContextStats.Fields.COMPILATIONS_HISTORY; import static org.elasticsearch.script.ScriptStats.Fields.CACHE_EVICTIONS; import static org.elasticsearch.script.ScriptStats.Fields.COMPILATIONS; @@ -205,7 +206,7 @@ public Iterator toXContentChunked(ToXContent.Params outerP builder.endObject(); } if (cacheEvictionsHistory != null && cacheEvictionsHistory.areTimingsEmpty() == false) { - builder.startObject(COMPILATIONS_HISTORY); + builder.startObject(CACHE_EVICTIONS_HISTORY); cacheEvictionsHistory.toXContent(builder, params); builder.endObject(); } diff --git a/server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java b/server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java index df81e8ebcbb16..b60afca0939ae 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptStatsTests.java @@ -78,6 +78,37 @@ public void testXContentChunked() throws IOException { assertThat(Strings.toString(builder), equalTo(expected)); } + public void testXContentChunkedHistory() throws Exception { + ScriptStats stats = new ScriptStats(5, 6, 7, new TimeSeries(10, 20, 30, 40), new TimeSeries(100, 200, 300, 400)); + final XContentBuilder builder = 
XContentFactory.jsonBuilder().prettyPrint(); + + builder.startObject(); + for (var it = stats.toXContentChunked(ToXContent.EMPTY_PARAMS); it.hasNext();) { + it.next().toXContent(builder, ToXContent.EMPTY_PARAMS); + } + builder.endObject(); + String expected = """ + { + "script" : { + "compilations" : 5, + "cache_evictions" : 6, + "compilation_limit_triggered" : 7, + "compilations_history" : { + "5m" : 10, + "15m" : 20, + "24h" : 30 + }, + "cache_evictions_history" : { + "5m" : 100, + "15m" : 200, + "24h" : 300 + }, + "contexts" : [ ] + } + }"""; + assertThat(Strings.toString(builder), equalTo(expected)); + } + public void testSerializeEmptyTimeSeries() throws IOException { ScriptContextStats stats = new ScriptContextStats("c", 3333, new TimeSeries(1111), new TimeSeries(2222)); From b5e482bc9182f2e3feaa9623593199cc9191e131 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 25 Feb 2025 09:58:39 -0800 Subject: [PATCH 20/20] Enable entitlements by default (#122907) Entitlements are almost complete. This commit enables them by default, in preparation for 8.18/9.0. --- .../java/org/elasticsearch/server/cli/SystemJvmOptions.java | 2 +- .../entitlement/initialization/EntitlementInitialization.java | 2 ++ .../main/java/org/elasticsearch/bootstrap/Elasticsearch.java | 4 ++-- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java index 97c70837c2187..d6e9ac8d9c9eb 100644 --- a/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java +++ b/distribution/tools/server-cli/src/main/java/org/elasticsearch/server/cli/SystemJvmOptions.java @@ -28,7 +28,7 @@ final class SystemJvmOptions { static List systemJvmOptions(Settings nodeSettings, final Map sysprops) { String distroType = sysprops.get("es.distribution.type"); boolean isHotspot = sysprops.getOrDefault("sun.management.compiler", "").contains("HotSpot"); - boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(sysprops.getOrDefault("es.entitlements.enabled", "false")); + boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(sysprops.getOrDefault("es.entitlements.enabled", "true")); // java 24+ only supports entitlements, but it may be enabled on earlier versions explicitly boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsExplicitlyEnabled; return Stream.of( diff --git a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java index 055660d54079b..64d84989b25c4 100644 --- a/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java +++ b/libs/entitlement/src/main/java/org/elasticsearch/entitlement/initialization/EntitlementInitialization.java @@ -178,6 +178,7 @@ private static PolicyManager createPolicyManager() { if (bootstrapArgs.pidFile() != null) { serverModuleFileDatas.add(FileData.ofPath(bootstrapArgs.pidFile(), READ_WRITE)); } + Collections.addAll( serverScopes, new Scope( @@ -186,6 +187,7 @@ private static PolicyManager createPolicyManager() { new CreateClassLoaderEntitlement(), new FilesEntitlement( List.of( + // TODO: what in es.base is accessing shared repo? 
FileData.ofRelativePath(Path.of(""), SHARED_REPO, READ_WRITE), FileData.ofRelativePath(Path.of(""), DATA, READ_WRITE) ) diff --git a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java index 5ccb4af45ade2..79f15bb208176 100644 --- a/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java +++ b/server/src/main/java/org/elasticsearch/bootstrap/Elasticsearch.java @@ -120,9 +120,9 @@ private static Bootstrap initPhase1() { final PrintStream out = getStdout(); final PrintStream err = getStderr(); final ServerArgs args; - final boolean entitlementsExplicitlyEnabled = Booleans.parseBoolean(System.getProperty("es.entitlements.enabled", "false")); + final boolean entitlementsEnabled = Booleans.parseBoolean(System.getProperty("es.entitlements.enabled", "true")); // java 24+ only supports entitlements, but it may be enabled on earlier versions explicitly - final boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsExplicitlyEnabled; + final boolean useEntitlements = RuntimeVersionFeature.isSecurityManagerAvailable() == false || entitlementsEnabled; try { initSecurityProperties();