From 9c35a848a4a7c389b2d2a1fa3116e6041c6c7890 Mon Sep 17 00:00:00 2001 From: Ashish Singh Date: Wed, 17 Apr 2024 16:45:26 +0530 Subject: [PATCH 01/15] Add remote path settings to RemoteStoreSettings (#13225) Signed-off-by: Ashish Singh --- .../remotestore/RemoteRestoreSnapshotIT.java | 2 +- .../metadata/MetadataCreateIndexService.java | 6 +- .../common/settings/ClusterSettings.java | 6 +- .../RemoteStorePathStrategyResolver.java | 27 ++------- .../opensearch/indices/IndicesService.java | 30 ---------- .../indices/RemoteStoreSettings.java | 59 ++++++++++++++++++- .../main/java/org/opensearch/node/Node.java | 3 +- .../MetadataRolloverServiceTests.java | 10 +++- .../MetadataCreateIndexServiceTests.java | 32 ++++++---- .../MetadataIndexTemplateServiceTests.java | 4 +- .../RemoteStorePathStrategyResolverTests.java | 32 ++++++---- .../indices/cluster/ClusterStateChanges.java | 4 +- .../snapshots/SnapshotResiliencyTests.java | 3 +- .../test/OpenSearchIntegTestCase.java | 4 +- 14 files changed, 133 insertions(+), 89 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java index 95b7d4381da18..f8e5079b01a36 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotestore/RemoteRestoreSnapshotIT.java @@ -59,7 +59,7 @@ import static org.opensearch.index.remote.RemoteStoreEnums.DataCategory.TRANSLOG; import static org.opensearch.index.remote.RemoteStoreEnums.DataType.DATA; import static org.opensearch.index.remote.RemoteStoreEnums.DataType.METADATA; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING; +import static org.opensearch.indices.RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING; import static org.opensearch.test.hamcrest.OpenSearchAssertions.assertAcked; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThan; diff --git a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java index d55ec3362b01f..0eba4d241f0fd 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/MetadataCreateIndexService.java @@ -98,6 +98,7 @@ import org.opensearch.indices.IndexCreationException; import org.opensearch.indices.IndicesService; import org.opensearch.indices.InvalidIndexNameException; +import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.indices.SystemIndices; import org.opensearch.indices.replication.common.ReplicationType; @@ -191,7 +192,8 @@ public MetadataCreateIndexService( final NamedXContentRegistry xContentRegistry, final SystemIndices systemIndices, final boolean forbidPrivateIndexSettings, - final AwarenessReplicaBalance awarenessReplicaBalance + final AwarenessReplicaBalance awarenessReplicaBalance, + final RemoteStoreSettings remoteStoreSettings ) { this.settings = settings; this.clusterService = clusterService; @@ -211,7 +213,7 @@ public MetadataCreateIndexService( createIndexTaskKey = clusterService.registerClusterManagerTask(ClusterManagerTaskKeys.CREATE_INDEX_KEY, true); Supplier minNodeVersionSupplier = () -> clusterService.state().nodes().getMinNodeVersion(); 
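For context, the `RemoteStoreSettings` object injected through this constructor is the same holder that the tests later in this patch construct directly; a minimal sketch of building one outside the node bootstrap, assuming only the constructor and setting keys added by this change (imports and surrounding test scaffolding elided):

```java
// Illustrative only: construct the settings holder that MetadataCreateIndexService now receives.
Settings nodeSettings = Settings.builder()
    .put(RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), PathType.HASHED_PREFIX)
    .put(RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING.getKey(), PathHashAlgorithm.FNV_1A_COMPOSITE_1)
    .build();
ClusterSettings clusterSettings = new ClusterSettings(nodeSettings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(nodeSettings, clusterSettings);
```

In production the instance is wired in by `Node` (see the `Node.java` hunk below); tests that do not exercise remote store paths simply pass `DefaultRemoteStoreSettings.INSTANCE`.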
remoteStorePathStrategyResolver = isRemoteDataAttributePresent(settings) - ? new RemoteStorePathStrategyResolver(clusterService.getClusterSettings(), minNodeVersionSupplier) + ? new RemoteStorePathStrategyResolver(remoteStoreSettings, minNodeVersionSupplier) : null; } diff --git a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java index 2904d49c224d7..dab0f6bcf1c85 100644 --- a/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java +++ b/server/src/main/java/org/opensearch/common/settings/ClusterSettings.java @@ -713,8 +713,6 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteStoreNodeService.MIGRATION_DIRECTION_SETTING, IndicesService.CLUSTER_REMOTE_INDEX_RESTRICT_ASYNC_DURABILITY_SETTING, IndicesService.CLUSTER_INDEX_RESTRICT_REPLICATION_TYPE_SETTING, - IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING, - IndicesService.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING, // Admission Control Settings AdmissionControlSettings.ADMISSION_CONTROL_TRANSPORT_LAYER_MODE, @@ -732,7 +730,9 @@ public void apply(Settings value, Settings current, Settings previous) { RemoteStoreSettings.CLUSTER_REMOTE_INDEX_SEGMENT_METADATA_RETENTION_MAX_COUNT_SETTING, RemoteStoreSettings.CLUSTER_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING, - RemoteStoreSettings.CLUSTER_REMOTE_TRANSLOG_TRANSFER_TIMEOUT_SETTING + RemoteStoreSettings.CLUSTER_REMOTE_TRANSLOG_TRANSFER_TIMEOUT_SETTING, + RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING, + RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING ) ) ); diff --git a/server/src/main/java/org/opensearch/index/remote/RemoteStorePathStrategyResolver.java b/server/src/main/java/org/opensearch/index/remote/RemoteStorePathStrategyResolver.java index f6925bcbcc92d..a33f7522daaae 100644 --- a/server/src/main/java/org/opensearch/index/remote/RemoteStorePathStrategyResolver.java +++ b/server/src/main/java/org/opensearch/index/remote/RemoteStorePathStrategyResolver.java @@ -9,10 +9,9 @@ package org.opensearch.index.remote; import org.opensearch.Version; -import org.opensearch.common.settings.ClusterSettings; import org.opensearch.index.remote.RemoteStoreEnums.PathHashAlgorithm; import org.opensearch.index.remote.RemoteStoreEnums.PathType; -import org.opensearch.indices.IndicesService; +import org.opensearch.indices.RemoteStoreSettings; import java.util.function.Supplier; @@ -23,35 +22,21 @@ */ public class RemoteStorePathStrategyResolver { - private volatile PathType type; - - private volatile PathHashAlgorithm hashAlgorithm; - + private final RemoteStoreSettings remoteStoreSettings; private final Supplier minNodeVersionSupplier; - public RemoteStorePathStrategyResolver(ClusterSettings clusterSettings, Supplier minNodeVersionSupplier) { + public RemoteStorePathStrategyResolver(RemoteStoreSettings remoteStoreSettings, Supplier minNodeVersionSupplier) { + this.remoteStoreSettings = remoteStoreSettings; this.minNodeVersionSupplier = minNodeVersionSupplier; - type = clusterSettings.get(IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING); - hashAlgorithm = clusterSettings.get(IndicesService.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING); - clusterSettings.addSettingsUpdateConsumer(IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING, this::setType); - clusterSettings.addSettingsUpdateConsumer(IndicesService.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING, this::setHashAlgorithm); } public RemoteStorePathStrategy get() 
{ PathType pathType; PathHashAlgorithm pathHashAlgorithm; // Min node version check ensures that we are enabling the new prefix type only when all the nodes understand it. - pathType = Version.CURRENT.compareTo(minNodeVersionSupplier.get()) <= 0 ? type : PathType.FIXED; + pathType = Version.CURRENT.compareTo(minNodeVersionSupplier.get()) <= 0 ? remoteStoreSettings.getPathType() : PathType.FIXED; // If the path type is fixed, hash algorithm is not applicable. - pathHashAlgorithm = pathType == PathType.FIXED ? null : hashAlgorithm; + pathHashAlgorithm = pathType == PathType.FIXED ? null : remoteStoreSettings.getPathHashAlgorithm(); return new RemoteStorePathStrategy(pathType, pathHashAlgorithm); } - - private void setType(PathType type) { - this.type = type; - } - - private void setHashAlgorithm(PathHashAlgorithm hashAlgorithm) { - this.hashAlgorithm = hashAlgorithm; - } } diff --git a/server/src/main/java/org/opensearch/indices/IndicesService.java b/server/src/main/java/org/opensearch/indices/IndicesService.java index 8cb240e8f6557..0187a9fb3b8ba 100644 --- a/server/src/main/java/org/opensearch/indices/IndicesService.java +++ b/server/src/main/java/org/opensearch/indices/IndicesService.java @@ -62,7 +62,6 @@ import org.opensearch.common.CheckedFunction; import org.opensearch.common.CheckedSupplier; import org.opensearch.common.Nullable; -import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.annotation.PublicApi; import org.opensearch.common.cache.policy.CachedQueryResult; import org.opensearch.common.cache.service.CacheService; @@ -125,8 +124,6 @@ import org.opensearch.index.query.QueryRewriteContext; import org.opensearch.index.recovery.RecoveryStats; import org.opensearch.index.refresh.RefreshStats; -import org.opensearch.index.remote.RemoteStoreEnums.PathHashAlgorithm; -import org.opensearch.index.remote.RemoteStoreEnums.PathType; import org.opensearch.index.remote.RemoteStoreStatsTrackerFactory; import org.opensearch.index.search.stats.SearchStats; import org.opensearch.index.seqno.RetentionLeaseStats; @@ -308,33 +305,6 @@ public class IndicesService extends AbstractLifecycleComponent Property.Final ); - /** - * This setting is used to set the remote store blob store path type strategy. This setting is effective only for - * remote store enabled cluster. - */ - @ExperimentalApi - public static final Setting CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING = new Setting<>( - "cluster.remote_store.index.path.type", - PathType.FIXED.toString(), - PathType::parseString, - Property.NodeScope, - Property.Dynamic - ); - - /** - * This setting is used to set the remote store blob store path hash algorithm strategy. This setting is effective only for - * remote store enabled cluster. This setting will come to effect if the {@link #CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING} - * is either {@code HASHED_PREFIX} or {@code HASHED_INFIX}. - */ - @ExperimentalApi - public static final Setting CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING = new Setting<>( - "cluster.remote_store.index.path.hash_algorithm", - PathHashAlgorithm.FNV_1A_COMPOSITE_1.toString(), - PathHashAlgorithm::parseString, - Property.NodeScope, - Property.Dynamic - ); - /** * The node's settings. 
*/ diff --git a/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java b/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java index 7f2121093f8e8..e0a9f7a9e05c1 100644 --- a/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java +++ b/server/src/main/java/org/opensearch/indices/RemoteStoreSettings.java @@ -8,6 +8,7 @@ package org.opensearch.indices; +import org.opensearch.common.annotation.ExperimentalApi; import org.opensearch.common.annotation.PublicApi; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Setting; @@ -15,6 +16,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.IndexSettings; +import org.opensearch.index.remote.RemoteStoreEnums; /** * Settings for remote store @@ -65,12 +67,41 @@ public class RemoteStoreSettings { Property.Dynamic ); + /** + * This setting is used to set the remote store blob store path type strategy. This setting is effective only for + * remote store enabled cluster. + */ + @ExperimentalApi + public static final Setting CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING = new Setting<>( + "cluster.remote_store.index.path.type", + RemoteStoreEnums.PathType.FIXED.toString(), + RemoteStoreEnums.PathType::parseString, + Property.NodeScope, + Property.Dynamic + ); + + /** + * This setting is used to set the remote store blob store path hash algorithm strategy. This setting is effective only for + * remote store enabled cluster. This setting will come to effect if the {@link #CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING} + * is either {@code HASHED_PREFIX} or {@code HASHED_INFIX}. + */ + @ExperimentalApi + public static final Setting CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING = new Setting<>( + "cluster.remote_store.index.path.hash_algorithm", + RemoteStoreEnums.PathHashAlgorithm.FNV_1A_COMPOSITE_1.toString(), + RemoteStoreEnums.PathHashAlgorithm::parseString, + Property.NodeScope, + Property.Dynamic + ); + private volatile TimeValue clusterRemoteTranslogBufferInterval; private volatile int minRemoteSegmentMetadataFiles; private volatile TimeValue clusterRemoteTranslogTransferTimeout; + private volatile RemoteStoreEnums.PathType pathType; + private volatile RemoteStoreEnums.PathHashAlgorithm pathHashAlgorithm; public RemoteStoreSettings(Settings settings, ClusterSettings clusterSettings) { - this.clusterRemoteTranslogBufferInterval = CLUSTER_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING.get(settings); + clusterRemoteTranslogBufferInterval = CLUSTER_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer( CLUSTER_REMOTE_TRANSLOG_BUFFER_INTERVAL_SETTING, this::setClusterRemoteTranslogBufferInterval @@ -82,11 +113,17 @@ public RemoteStoreSettings(Settings settings, ClusterSettings clusterSettings) { this::setMinRemoteSegmentMetadataFiles ); - this.clusterRemoteTranslogTransferTimeout = CLUSTER_REMOTE_TRANSLOG_TRANSFER_TIMEOUT_SETTING.get(settings); + clusterRemoteTranslogTransferTimeout = CLUSTER_REMOTE_TRANSLOG_TRANSFER_TIMEOUT_SETTING.get(settings); clusterSettings.addSettingsUpdateConsumer( CLUSTER_REMOTE_TRANSLOG_TRANSFER_TIMEOUT_SETTING, this::setClusterRemoteTranslogTransferTimeout ); + + pathType = clusterSettings.get(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING); + clusterSettings.addSettingsUpdateConsumer(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING, this::setPathType); + + pathHashAlgorithm = clusterSettings.get(CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING); + 
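The resolver shown above reads these values through the new getters instead of tracking its own copies; a minimal sketch of a caller (hypothetical variable names, assuming a `clusterService` and the `remoteStoreSettings` built in this constructor are in scope):

```java
// Illustrative only: resolve the effective blob-store path strategy.
// The configured type is honored only once every node in the cluster is on a version that
// understands it; otherwise the resolver falls back to FIXED and leaves the hash algorithm null.
RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(
    remoteStoreSettings,
    () -> clusterService.state().nodes().getMinNodeVersion()
);
RemoteStorePathStrategy strategy = resolver.get();
RemoteStoreEnums.PathType type = strategy.getType();                   // e.g. HASHED_PREFIX
RemoteStoreEnums.PathHashAlgorithm algo = strategy.getHashAlgorithm(); // null when type resolves to FIXED
```

Because both settings are declared `Dynamic`, the update consumers registered here keep the getters in sync with later `cluster.remote_store.index.path.*` changes, so callers never need to recreate the resolver.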
clusterSettings.addSettingsUpdateConsumer(CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING, this::setPathHashAlgorithm); } public TimeValue getClusterRemoteTranslogBufferInterval() { @@ -112,4 +149,22 @@ public TimeValue getClusterRemoteTranslogTransferTimeout() { private void setClusterRemoteTranslogTransferTimeout(TimeValue clusterRemoteTranslogTransferTimeout) { this.clusterRemoteTranslogTransferTimeout = clusterRemoteTranslogTransferTimeout; } + + @ExperimentalApi + public RemoteStoreEnums.PathType getPathType() { + return pathType; + } + + @ExperimentalApi + public RemoteStoreEnums.PathHashAlgorithm getPathHashAlgorithm() { + return pathHashAlgorithm; + } + + private void setPathType(RemoteStoreEnums.PathType pathType) { + this.pathType = pathType; + } + + private void setPathHashAlgorithm(RemoteStoreEnums.PathHashAlgorithm pathHashAlgorithm) { + this.pathHashAlgorithm = pathHashAlgorithm; + } } diff --git a/server/src/main/java/org/opensearch/node/Node.java b/server/src/main/java/org/opensearch/node/Node.java index 7fa2b6c8ff497..a33fd71e21896 100644 --- a/server/src/main/java/org/opensearch/node/Node.java +++ b/server/src/main/java/org/opensearch/node/Node.java @@ -863,7 +863,8 @@ protected Node( xContentRegistry, systemIndices, forbidPrivateIndexSettings, - awarenessReplicaBalance + awarenessReplicaBalance, + remoteStoreSettings ); pluginsService.filterPlugins(Plugin.class) .forEach( diff --git a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java index da9a8b928a779..50ffd7322544a 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/rollover/MetadataRolloverServiceTests.java @@ -81,6 +81,7 @@ import org.opensearch.index.mapper.MetadataFieldMapper; import org.opensearch.index.mapper.RoutingFieldMapper; import org.opensearch.index.shard.IndexEventListener; +import org.opensearch.indices.DefaultRemoteStoreSettings; import org.opensearch.indices.IndicesService; import org.opensearch.indices.InvalidIndexNameException; import org.opensearch.indices.ShardLimitValidator; @@ -738,7 +739,8 @@ public void testRolloverClusterState() throws Exception { null, systemIndices, false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); MetadataIndexAliasesService indexAliasesService = new MetadataIndexAliasesService( clusterService, @@ -876,7 +878,8 @@ public void testRolloverClusterStateForDataStream() throws Exception { null, systemIndices, false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); MetadataIndexAliasesService indexAliasesService = new MetadataIndexAliasesService( clusterService, @@ -1054,7 +1057,8 @@ public void testRolloverClusterStateForDataStreamNoTemplate() throws Exception { null, new SystemIndices(emptyMap()), false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); MetadataIndexAliasesService indexAliasesService = new 
MetadataIndexAliasesService( clusterService, diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java index 1a9321a755fef..fad98a6609c3b 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataCreateIndexServiceTests.java @@ -76,10 +76,12 @@ import org.opensearch.index.remote.RemoteStoreEnums.PathHashAlgorithm; import org.opensearch.index.remote.RemoteStoreEnums.PathType; import org.opensearch.index.translog.Translog; +import org.opensearch.indices.DefaultRemoteStoreSettings; import org.opensearch.indices.IndexCreationException; import org.opensearch.indices.IndicesService; import org.opensearch.indices.InvalidAliasNameException; import org.opensearch.indices.InvalidIndexNameException; +import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.indices.SystemIndexDescriptor; import org.opensearch.indices.SystemIndices; @@ -702,7 +704,8 @@ public void testValidateIndexName() throws Exception { null, new SystemIndices(Collections.emptyMap()), false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); validateIndexName( checkerService, @@ -788,7 +791,8 @@ public void testValidateDotIndex() { null, new SystemIndices(Collections.singletonMap("foo", systemIndexDescriptors)), false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); // Check deprecations assertFalse(checkerService.validateDotIndex(".test2", false)); @@ -1213,7 +1217,8 @@ public void testvalidateIndexSettings() { null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(settings, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(settings, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); List validationErrors = checkerService.getIndexSettingsValidationErrors(settings, false, Optional.empty()); @@ -1332,7 +1337,8 @@ public void testClusterForceReplicationTypeInValidateIndexSettings() { null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(forceClusterSettingEnabled, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(forceClusterSettingEnabled, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); // Use DOCUMENT replication type setting for index creation final Settings indexSettings = Settings.builder().put(INDEX_REPLICATION_TYPE_SETTING.getKey(), ReplicationType.DOCUMENT).build(); @@ -1457,7 +1463,8 @@ public void testRemoteStoreDisabledByUserIndexSettings() { null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); final List validationErrors = checkerService.getIndexSettingsValidationErrors( @@ -1491,7 +1498,8 @@ public void testRemoteStoreOverrideSegmentRepoIndexSettings() { null, new SystemIndices(Collections.emptyMap()), true, - new 
AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); final List validationErrors = checkerService.getIndexSettingsValidationErrors( @@ -1530,7 +1538,8 @@ public void testRemoteStoreOverrideTranslogRepoIndexSettings() { null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); final List validationErrors = checkerService.getIndexSettingsValidationErrors( @@ -1720,7 +1729,7 @@ private IndexMetadata testRemoteCustomData(boolean remoteStoreEnabled, PathType if (remoteStoreEnabled) { settingsBuilder.put(NODE_ATTRIBUTES.getKey() + REMOTE_STORE_SEGMENT_REPOSITORY_NAME_ATTRIBUTE_KEY, "test"); } - settingsBuilder.put(IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), pathType.toString()); + settingsBuilder.put(RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), pathType.toString()); Settings settings = settingsBuilder.build(); ClusterService clusterService = mock(ClusterService.class); @@ -1734,6 +1743,7 @@ private IndexMetadata testRemoteCustomData(boolean remoteStoreEnabled, PathType when(clusterService.getSettings()).thenReturn(settings); when(clusterService.getClusterSettings()).thenReturn(clusterSettings); when(clusterService.state()).thenReturn(clusterState); + RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); ThreadPool threadPool = new TestThreadPool(getTestName()); MetadataCreateIndexService metadataCreateIndexService = new MetadataCreateIndexService( @@ -1749,7 +1759,8 @@ private IndexMetadata testRemoteCustomData(boolean remoteStoreEnabled, PathType null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(settings, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(settings, clusterService.getClusterSettings()), + remoteStoreSettings ); CreateIndexClusterStateUpdateRequest request = new CreateIndexClusterStateUpdateRequest("create index", "test", "test"); Settings indexSettings = Settings.builder() @@ -1872,7 +1883,8 @@ public void testIndexLifecycleNameSetting() { null, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); final List validationErrors = checkerService.getIndexSettingsValidationErrors(ilnSetting, true, Optional.empty()); diff --git a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java index 0b8e64e31a523..0b99ffac67ee8 100644 --- a/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java +++ b/server/src/test/java/org/opensearch/cluster/metadata/MetadataIndexTemplateServiceTests.java @@ -55,6 +55,7 @@ import org.opensearch.env.Environment; import org.opensearch.index.mapper.MapperParsingException; import org.opensearch.index.mapper.MapperService; +import org.opensearch.indices.DefaultRemoteStoreSettings; import org.opensearch.indices.IndexTemplateMissingException; import org.opensearch.indices.InvalidIndexTemplateException; import 
org.opensearch.indices.SystemIndices; @@ -2051,7 +2052,8 @@ private static List putTemplate(NamedXContentRegistry xContentRegistr xContentRegistry, new SystemIndices(Collections.emptyMap()), true, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); MetadataIndexTemplateService service = new MetadataIndexTemplateService( clusterService, diff --git a/server/src/test/java/org/opensearch/index/remote/RemoteStorePathStrategyResolverTests.java b/server/src/test/java/org/opensearch/index/remote/RemoteStorePathStrategyResolverTests.java index 4aa0d11601a05..d28ebc8c2e5da 100644 --- a/server/src/test/java/org/opensearch/index/remote/RemoteStorePathStrategyResolverTests.java +++ b/server/src/test/java/org/opensearch/index/remote/RemoteStorePathStrategyResolverTests.java @@ -13,17 +13,19 @@ import org.opensearch.common.settings.Settings; import org.opensearch.index.remote.RemoteStoreEnums.PathHashAlgorithm; import org.opensearch.index.remote.RemoteStoreEnums.PathType; +import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.test.OpenSearchTestCase; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING; +import static org.opensearch.indices.RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING; +import static org.opensearch.indices.RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING; public class RemoteStorePathStrategyResolverTests extends OpenSearchTestCase { public void testGetMinVersionOlder() { Settings settings = Settings.builder().put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), randomFrom(PathType.values())).build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.V_2_13_0); + RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.V_2_13_0); assertEquals(PathType.FIXED, resolver.get().getType()); assertNull(resolver.get().getHashAlgorithm()); } @@ -32,7 +34,8 @@ public void testGetMinVersionNewer() { PathType pathType = randomFrom(PathType.values()); Settings settings = Settings.builder().put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), pathType).build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(pathType, resolver.get().getType()); if (pathType.requiresHashAlgorithm()) { assertNotNull(resolver.get().getHashAlgorithm()); @@ -45,7 +48,8 @@ public void testGetStrategy() { // FIXED type Settings settings = Settings.builder().put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), PathType.FIXED).build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - 
RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.FIXED, resolver.get().getType()); // FIXED type with hash algorithm @@ -54,20 +58,23 @@ public void testGetStrategy() { .put(CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING.getKey(), randomFrom(PathHashAlgorithm.values())) .build(); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.FIXED, resolver.get().getType()); // HASHED_PREFIX type with FNV_1A_COMPOSITE settings = Settings.builder().put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), PathType.HASHED_PREFIX).build(); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.HASHED_PREFIX, resolver.get().getType()); assertEquals(PathHashAlgorithm.FNV_1A_COMPOSITE_1, resolver.get().getHashAlgorithm()); // HASHED_PREFIX type with FNV_1A_COMPOSITE settings = Settings.builder().put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), PathType.HASHED_PREFIX).build(); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.HASHED_PREFIX, resolver.get().getType()); assertEquals(PathHashAlgorithm.FNV_1A_COMPOSITE_1, resolver.get().getHashAlgorithm()); @@ -77,7 +84,8 @@ public void testGetStrategy() { .put(CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING.getKey(), PathHashAlgorithm.FNV_1A_BASE64) .build(); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.HASHED_PREFIX, resolver.get().getType()); assertEquals(PathHashAlgorithm.FNV_1A_BASE64, resolver.get().getHashAlgorithm()); @@ -87,7 +95,8 @@ public void testGetStrategy() { .put(CLUSTER_REMOTE_STORE_PATH_HASH_ALGORITHM_SETTING.getKey(), PathHashAlgorithm.FNV_1A_BASE64) .build(); clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.HASHED_PREFIX, 
resolver.get().getType()); assertEquals(PathHashAlgorithm.FNV_1A_BASE64, resolver.get().getHashAlgorithm()); } @@ -97,7 +106,8 @@ public void testGetStrategyWithDynamicUpdate() { // Default value Settings settings = Settings.builder().build(); ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(clusterSettings, () -> Version.CURRENT); + RemoteStoreSettings remoteStoreSettings = new RemoteStoreSettings(settings, clusterSettings); + RemoteStorePathStrategyResolver resolver = new RemoteStorePathStrategyResolver(remoteStoreSettings, () -> Version.CURRENT); assertEquals(PathType.FIXED, resolver.get().getType()); assertNull(resolver.get().getHashAlgorithm()); diff --git a/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java b/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java index dc4dca80ea110..17bd821ed0c8c 100644 --- a/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java +++ b/server/src/test/java/org/opensearch/indices/cluster/ClusterStateChanges.java @@ -105,6 +105,7 @@ import org.opensearch.index.IndexService; import org.opensearch.index.mapper.MapperService; import org.opensearch.index.shard.IndexEventListener; +import org.opensearch.indices.DefaultRemoteStoreSettings; import org.opensearch.indices.IndicesService; import org.opensearch.indices.ShardLimitValidator; import org.opensearch.indices.SystemIndices; @@ -312,7 +313,8 @@ public IndexMetadata upgradeIndexMetadata(IndexMetadata indexMetadata, Version m xContentRegistry, systemIndices, true, - awarenessReplicaBalance + awarenessReplicaBalance, + DefaultRemoteStoreSettings.INSTANCE ); transportCloseIndexAction = new TransportCloseIndexAction( diff --git a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java index 4326e5fc63961..95a343f3b4025 100644 --- a/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java +++ b/server/src/test/java/org/opensearch/snapshots/SnapshotResiliencyTests.java @@ -2163,7 +2163,8 @@ public void onFailure(final Exception e) { namedXContentRegistry, systemIndices, false, - new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()) + new AwarenessReplicaBalance(Settings.EMPTY, clusterService.getClusterSettings()), + DefaultRemoteStoreSettings.INSTANCE ); actions.put( CreateIndexAction.INSTANCE, diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index c8d44efd8076a..41b8c994f4ec4 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -142,6 +142,7 @@ import org.opensearch.indices.IndicesQueryCache; import org.opensearch.indices.IndicesRequestCache; import org.opensearch.indices.IndicesService; +import org.opensearch.indices.RemoteStoreSettings; import org.opensearch.indices.replication.common.ReplicationType; import org.opensearch.indices.store.IndicesStore; import org.opensearch.monitor.os.OsInfo; @@ -211,7 +212,6 @@ import static org.opensearch.index.IndexSettings.INDEX_DOC_ID_FUZZY_SET_FALSE_POSITIVE_PROBABILITY_SETTING; import static org.opensearch.index.IndexSettings.INDEX_SOFT_DELETES_RETENTION_LEASE_PERIOD_SETTING; import 
static org.opensearch.index.query.QueryBuilders.matchAllQuery; -import static org.opensearch.indices.IndicesService.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING; import static org.opensearch.indices.IndicesService.CLUSTER_REPLICATION_TYPE_SETTING; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_CLUSTER_STATE_REPOSITORY_NAME_ATTRIBUTE_KEY; import static org.opensearch.node.remotestore.RemoteStoreNodeAttribute.REMOTE_STORE_REPOSITORY_SETTINGS_ATTRIBUTE_KEY_PREFIX; @@ -2619,7 +2619,7 @@ private static Settings buildRemoteStoreNodeAttributes( settings.put(segmentRepoSettingsAttributeKeyPrefix + "compress", randomBoolean()) .put(segmentRepoSettingsAttributeKeyPrefix + "chunk_size", 200, ByteSizeUnit.BYTES); } - settings.put(CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), randomFrom(PathType.values())); + settings.put(RemoteStoreSettings.CLUSTER_REMOTE_STORE_PATH_TYPE_SETTING.getKey(), randomFrom(PathType.values())); return settings.build(); } From 51009b76d480ac5b03b667b6282a987d0695ca2f Mon Sep 17 00:00:00 2001 From: Atharva Sharma <60044988+atharvasharma61@users.noreply.github.com> Date: Wed, 17 Apr 2024 19:23:05 +0530 Subject: [PATCH 02/15] enabled mockTelemetryPlugin for IT and fixed OOM (#13054) * Disable stackTrace in MockSpanData by default Signed-off-by: Atharva Sharma * enabled MockTelemetryPlugin for ITs Signed-off-by: Atharva Sharma * Added the flag as system property Signed-off-by: Atharva Sharma * Applied java spotless check Signed-off-by: Atharva Sharma * Added details in changelog Signed-off-by: Atharva Sharma * Added details in TESTING.md Signed-off-by: Atharva Sharma * Update TESTING.md Signed-off-by: Atharva Sharma <60044988+atharvasharma61@users.noreply.github.com> --------- Signed-off-by: Atharva Sharma Signed-off-by: Atharva Sharma <60044988+atharvasharma61@users.noreply.github.com> --- CHANGELOG.md | 1 + TESTING.md | 7 ++++--- .../org/opensearch/test/OpenSearchIntegTestCase.java | 3 +-- .../telemetry/tracing/StrictCheckSpanProcessor.java | 12 +++++++++++- 4 files changed, 17 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fe6458937f791..22c46d3b02e9e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -55,6 +55,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Fix issue with feature flags where default value may not be honored ([#12849](https://github.com/opensearch-project/OpenSearch/pull/12849)) - Fix UOE While building Exists query for nested search_as_you_type field ([#12048](https://github.com/opensearch-project/OpenSearch/pull/12048)) - Client with Java 8 runtime and Apache HttpClient 5 Transport fails with java.lang.NoSuchMethodError: java.nio.ByteBuffer.flip()Ljava/nio/ByteBuffer ([#13100](https://github.com/opensearch-project/opensearch-java/pull/13100)) +- Enabled mockTelemetryPlugin for IT and fixed OOM issues ([#13054](https://github.com/opensearch-project/OpenSearch/pull/13054)) - Fix implement mark() and markSupported() in class FilterStreamInput ([#13098](https://github.com/opensearch-project/OpenSearch/pull/13098)) - Fix snapshot _status API to return correct status for partial snapshots ([#12812](https://github.com/opensearch-project/OpenSearch/pull/12812)) diff --git a/TESTING.md b/TESTING.md index 1c91d60840d61..80fc2412d736b 100644 --- a/TESTING.md +++ b/TESTING.md @@ -84,6 +84,7 @@ This will instruct all JVMs (including any that run cli tools such as creating t - In order to remotely attach a debugger to the process: `--debug-jvm` - In order to set a different 
keystore password: `--keystore-password yourpassword` - In order to set an OpenSearch setting, provide a setting with the following prefix: `-Dtests.opensearch.` +- In order to enable stack trace of the MockSpanData during testing, add: `-Dtests.telemetry.span.stack_traces=true` (Storing stack traces alongside span data can be useful for comprehensive debugging and performance optimization during testing, as it provides insights into the exact code paths and execution sequences, facilitating efficient issue identification and resolution. Note: Enabling this might lead to OOM issues while running ITs) ## Test case filtering @@ -412,8 +413,8 @@ Say you need to make a change to `main` and have a BWC layer in `5.x`. You will You may want to run BWC tests for a secure OpenSearch cluster. In order to do this, you will need to follow a few additional steps: 1. Clone the OpenSearch Security repository from https://github.com/opensearch-project/security. -2. Get both the old version of the Security plugin (the version you wish to come from) and the new version of the Security plugin (the version you wish to go to). This can be done either by fetching the maven artifact with a command like `wget https://repo1.maven.org/maven2/org/opensearch/plugin/opensearch-security/.0/opensearch-security-.0.zip` or by running `./gradlew assemble` from the base of the Security repository. -3. Move both of the Security artifacts into new directories at the path `/security/bwc-test/src/test/resources/.0`. You should end up with two different directories in `/security/bwc-test/src/test/resources/`, one named the old version and one the new version. +2. Get both the old version of the Security plugin (the version you wish to come from) and the new version of the Security plugin (the version you wish to go to). This can be done either by fetching the maven artifact with a command like `wget https://repo1.maven.org/maven2/org/opensearch/plugin/opensearch-security/.0/opensearch-security-.0.zip` or by running `./gradlew assemble` from the base of the Security repository. +3. Move both of the Security artifacts into new directories at the path `/security/bwc-test/src/test/resources/.0`. You should end up with two different directories in `/security/bwc-test/src/test/resources/`, one named the old version and one the new version. 4. Run the following command from the base of the Security repository: ``` @@ -428,7 +429,7 @@ You may want to run BWC tests for a secure OpenSearch cluster. In order to do th `-Dtests.security.manager=false` handles access issues when attempting to read the certificates from the file system. `-Dtests.opensearch.http.protocol=https` tells the wait for cluster startup task to do the right thing. -`-PcustomDistributionUrl=...` uses a custom build of the distribution of OpenSearch. This is unnecessary when running against standard/unmodified OpenSearch core distributions. +`-PcustomDistributionUrl=...` uses a custom build of the distribution of OpenSearch. This is unnecessary when running against standard/unmodified OpenSearch core distributions. 
### Skip fetching latest diff --git a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java index 41b8c994f4ec4..286f0a1d91b4c 100644 --- a/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java +++ b/test/framework/src/main/java/org/opensearch/test/OpenSearchIntegTestCase.java @@ -2096,8 +2096,7 @@ protected boolean addMockGeoShapeFieldMapper() { * @return boolean. */ protected boolean addMockTelemetryPlugin() { - // setting to false until https://github.com/opensearch-project/OpenSearch/issues/12615 is resolved - return false; + return true; } /** diff --git a/test/telemetry/src/main/java/org/opensearch/test/telemetry/tracing/StrictCheckSpanProcessor.java b/test/telemetry/src/main/java/org/opensearch/test/telemetry/tracing/StrictCheckSpanProcessor.java index f7ebb3ee18a9b..4e72caeea584e 100644 --- a/test/telemetry/src/main/java/org/opensearch/test/telemetry/tracing/StrictCheckSpanProcessor.java +++ b/test/telemetry/src/main/java/org/opensearch/test/telemetry/tracing/StrictCheckSpanProcessor.java @@ -8,6 +8,7 @@ package org.opensearch.test.telemetry.tracing; +import org.opensearch.common.Booleans; import org.opensearch.telemetry.tracing.Span; import org.opensearch.test.telemetry.tracing.validators.AllSpansAreEndedProperly; import org.opensearch.test.telemetry.tracing.validators.AllSpansHaveUniqueId; @@ -29,6 +30,14 @@ public StrictCheckSpanProcessor() {} private static Map spanMap = new ConcurrentHashMap<>(); + // If you want to see the stack trace for each spanData, then + // update the flag to true or set the corresponding system property to true + // This is helpful in debugging the tests. Default value is false. + // Note: Enabling this might lead to OOM issues while running ITs. + private static final boolean isStackTraceForSpanEnabled = Booleans.parseBoolean( + System.getProperty("tests.telemetry.span.stack_traces", "false") + ); + @Override public void onStart(Span span) { spanMap.put(span.getSpanId(), toMockSpanData(span)); @@ -53,6 +62,7 @@ public List getFinishedSpanItems() { private MockSpanData toMockSpanData(Span span) { String parentSpanId = (span.getParentSpan() != null) ? span.getParentSpan().getSpanId() : ""; + StackTraceElement[] stackTrace = isStackTraceForSpanEnabled ? Thread.currentThread().getStackTrace() : null; MockSpanData spanData = new MockSpanData( span.getSpanId(), parentSpanId, @@ -60,7 +70,7 @@ private MockSpanData toMockSpanData(Span span) { System.nanoTime(), false, span.getSpanName(), - Thread.currentThread().getStackTrace(), + stackTrace, (span instanceof MockSpan) ? 
((MockSpan) span).getAttributes() : Map.of() ); return spanData; From c1d5d76006c64d806a06a3ac4c0dfc962fc13d54 Mon Sep 17 00:00:00 2001 From: Craig Perkins Date: Wed, 17 Apr 2024 10:10:09 -0400 Subject: [PATCH 03/15] Update google dependencies in repository-gcs and discovery-gce (#13213) * Update google dependencies in repository-gcs and discovery-gce Signed-off-by: Craig Perkins * Add to CHANGELOG Signed-off-by: Craig Perkins * Fix test errors and mimic repository-gcs Signed-off-by: Craig Perkins --------- Signed-off-by: Craig Perkins --- CHANGELOG.md | 1 + buildSrc/version.properties | 1 + plugins/discovery-gce/build.gradle | 74 ++++--- .../google-api-client-1.23.0.jar.sha1 | 1 - .../google-api-client-1.35.2.jar.sha1 | 1 + ...services-compute-v1-rev160-1.23.0.jar.sha1 | 1 - ...services-compute-v1-rev235-1.25.0.jar.sha1 | 1 + .../google-http-client-1.23.0.jar.sha1 | 1 - .../google-http-client-1.44.1.jar.sha1 | 1 + .../google-http-client-gson-1.44.1.jar.sha1 | 1 + ...oogle-http-client-jackson2-1.23.0.jar.sha1 | 1 - ...oogle-http-client-jackson2-1.44.1.jar.sha1 | 1 + .../licenses/grpc-api-1.57.2.jar.sha1 | 1 + .../licenses/grpc-api-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/grpc-api-NOTICE.txt | 0 .../licenses/guava-32.1.1-jre.jar.sha1 | 1 + .../discovery-gce/licenses/guava-LICENSE.txt | 202 ++++++++++++++++++ .../discovery-gce/licenses/guava-NOTICE.txt | 0 .../licenses/opencensus-LICENSE.txt | 202 ++++++++++++++++++ .../licenses/opencensus-NOTICE.txt | 0 .../licenses/opencensus-api-0.31.1.jar.sha1 | 1 + ...encensus-contrib-http-util-0.31.1.jar.sha1 | 1 + plugins/repository-gcs/build.gradle | 8 +- .../google-http-client-1.43.3.jar.sha1 | 1 - .../google-http-client-1.44.1.jar.sha1 | 1 + ...ogle-http-client-appengine-1.43.3.jar.sha1 | 1 - ...ogle-http-client-appengine-1.44.1.jar.sha1 | 1 + .../google-http-client-gson-1.43.3.jar.sha1 | 1 - .../google-http-client-gson-1.44.1.jar.sha1 | 1 + 29 files changed, 667 insertions(+), 42 deletions(-) delete mode 100644 plugins/discovery-gce/licenses/google-api-client-1.23.0.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-api-client-1.35.2.jar.sha1 delete mode 100644 plugins/discovery-gce/licenses/google-api-services-compute-v1-rev160-1.23.0.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-api-services-compute-v1-rev235-1.25.0.jar.sha1 delete mode 100644 plugins/discovery-gce/licenses/google-http-client-1.23.0.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-http-client-1.44.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-http-client-gson-1.44.1.jar.sha1 delete mode 100644 plugins/discovery-gce/licenses/google-http-client-jackson2-1.23.0.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/google-http-client-jackson2-1.44.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/grpc-api-LICENSE.txt create mode 100644 plugins/discovery-gce/licenses/grpc-api-NOTICE.txt create mode 100644 plugins/discovery-gce/licenses/guava-32.1.1-jre.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/guava-LICENSE.txt create mode 100644 plugins/discovery-gce/licenses/guava-NOTICE.txt create mode 100644 plugins/discovery-gce/licenses/opencensus-LICENSE.txt create mode 100644 plugins/discovery-gce/licenses/opencensus-NOTICE.txt create mode 100644 plugins/discovery-gce/licenses/opencensus-api-0.31.1.jar.sha1 create mode 100644 plugins/discovery-gce/licenses/opencensus-contrib-http-util-0.31.1.jar.sha1 
delete mode 100644 plugins/repository-gcs/licenses/google-http-client-1.43.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-1.44.1.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.43.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.44.1.jar.sha1 delete mode 100644 plugins/repository-gcs/licenses/google-http-client-gson-1.43.3.jar.sha1 create mode 100644 plugins/repository-gcs/licenses/google-http-client-gson-1.44.1.jar.sha1 diff --git a/CHANGELOG.md b/CHANGELOG.md index 22c46d3b02e9e..5efcfee3d9d9d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,6 +38,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Bump `gradle/wrapper-validation-action` from 2 to 3 ([#13192](https://github.com/opensearch-project/OpenSearch/pull/13192)) - Bump joda from 2.12.2 to 2.12.7 ([#13193](https://github.com/opensearch-project/OpenSearch/pull/13193)) - Bump bouncycastle from 1.77 to 1.78 ([#13243](https://github.com/opensearch-project/OpenSearch/pull/13243)) +- Update google dependencies in repository-gcs and discovery-gce ([#13213](https://github.com/opensearch-project/OpenSearch/pull/13213)) ### Changed - [BWC and API enforcement] Enforcing the presence of API annotations at build time ([#12872](https://github.com/opensearch-project/OpenSearch/pull/12872)) diff --git a/buildSrc/version.properties b/buildSrc/version.properties index ae9abcd58aa3a..6c6138ac9b7f6 100644 --- a/buildSrc/version.properties +++ b/buildSrc/version.properties @@ -22,6 +22,7 @@ antlr4 = 4.13.1 guava = 32.1.1-jre protobuf = 3.22.3 jakarta_annotation = 1.3.5 +google_http_client = 1.44.1 # when updating the JNA version, also update the version in buildSrc/build.gradle jna = 5.13.0 diff --git a/plugins/discovery-gce/build.gradle b/plugins/discovery-gce/build.gradle index 85efcc43fd65a..92cdda59d1c99 100644 --- a/plugins/discovery-gce/build.gradle +++ b/plugins/discovery-gce/build.gradle @@ -17,22 +17,23 @@ opensearchplugin { classname 'org.opensearch.plugin.discovery.gce.GceDiscoveryPlugin' } -versions << [ - 'google': '1.23.0' -] - dependencies { - api "com.google.apis:google-api-services-compute:v1-rev160-${versions.google}" - api "com.google.api-client:google-api-client:${versions.google}" + api "com.google.apis:google-api-services-compute:v1-rev235-1.25.0" + api "com.google.api-client:google-api-client:1.35.2" api "com.google.oauth-client:google-oauth-client:1.35.0" - api "com.google.http-client:google-http-client:${versions.google}" - api "com.google.http-client:google-http-client-jackson2:${versions.google}" + api "com.google.http-client:google-http-client:${versions.google_http_client}" + api "com.google.http-client:google-http-client-gson:${versions.google_http_client}" + api "com.google.http-client:google-http-client-jackson2:${versions.google_http_client}" api 'com.google.code.findbugs:jsr305:3.0.2' api "org.apache.httpcomponents:httpclient:${versions.httpclient}" api "org.apache.httpcomponents:httpcore:${versions.httpcore}" api "commons-logging:commons-logging:${versions.commonslogging}" api "org.apache.logging.log4j:log4j-1.2-api:${versions.log4j}" api "commons-codec:commons-codec:${versions.commonscodec}" + api 'io.grpc:grpc-api:1.57.2' + api 'io.opencensus:opencensus-api:0.31.1' + api 'io.opencensus:opencensus-contrib-http-util:0.31.1' + runtimeOnly "com.google.guava:guava:${versions.guava}" } restResources { @@ -43,6 +44,7 @@ restResources { 
tasks.named("dependencyLicenses").configure { mapping from: /google-.*/, to: 'google' + mapping from: /opencensus.*/, to: 'opencensus' } check { @@ -55,26 +57,36 @@ test { systemProperty 'tests.artifact', project.name } -thirdPartyAudit.ignoreMissingClasses( - // classes are missing - 'javax.jms.Message', - 'javax.servlet.ServletContextEvent', - 'javax.servlet.ServletContextListener', - 'org.apache.avalon.framework.logger.Logger', - 'org.apache.log.Hierarchy', - 'org.apache.log.Logger', - 'com.google.api.client.json.gson.GsonFactory', - 'com.google.common.base.Preconditions', - 'com.google.common.base.Splitter', - 'com.google.common.cache.CacheBuilder', - 'com.google.common.cache.CacheLoader', - 'com.google.common.cache.LoadingCache', - 'com.google.common.collect.ImmutableMap', - 'com.google.common.collect.ImmutableMap$Builder', - 'com.google.common.collect.ImmutableSet', - 'com.google.common.collect.Lists', - 'com.google.common.collect.Multiset', - 'com.google.common.collect.SortedMultiset', - 'com.google.common.collect.TreeMultiset', - 'com.google.common.io.BaseEncoding', -) +thirdPartyAudit { + ignoreViolations( + // uses internal java api: sun.misc.Unsafe + 'com.google.common.cache.Striped64', + 'com.google.common.cache.Striped64$1', + 'com.google.common.cache.Striped64$Cell', + 'com.google.common.hash.Striped64', + 'com.google.common.hash.Striped64$1', + 'com.google.common.hash.Striped64$Cell', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$1', + 'com.google.common.hash.LittleEndianByteArray$UnsafeByteArray$2', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper', + 'com.google.common.util.concurrent.AbstractFuture$UnsafeAtomicHelper$1', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator', + 'com.google.common.primitives.UnsignedBytes$LexicographicalComparatorHolder$UnsafeComparator$1', + ) + + ignoreMissingClasses( + 'com.google.api.client.http.apache.v2.ApacheHttpTransport', + 'com.google.common.util.concurrent.internal.InternalFutureFailureAccess', + 'com.google.common.util.concurrent.internal.InternalFutures', + 'com.google.gson.stream.JsonReader', + 'com.google.gson.stream.JsonToken', + 'com.google.gson.stream.JsonWriter', + 'javax.jms.Message', + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener', + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger' + ) +} diff --git a/plugins/discovery-gce/licenses/google-api-client-1.23.0.jar.sha1 b/plugins/discovery-gce/licenses/google-api-client-1.23.0.jar.sha1 deleted file mode 100644 index 0c35d8e08b91f..0000000000000 --- a/plugins/discovery-gce/licenses/google-api-client-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -522ea860eb48dee71dfe2c61a1fd09663539f556 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-api-client-1.35.2.jar.sha1 b/plugins/discovery-gce/licenses/google-api-client-1.35.2.jar.sha1 new file mode 100644 index 0000000000000..47245f9429e7d --- /dev/null +++ b/plugins/discovery-gce/licenses/google-api-client-1.35.2.jar.sha1 @@ -0,0 +1 @@ +2d737980e34c674da4ff0ae124b80caefdc7198a \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev160-1.23.0.jar.sha1 b/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev160-1.23.0.jar.sha1 deleted file mode 100644 index 17219dfe7ecc9..0000000000000 --- 
a/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev160-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -004169bfe1cf0e8b2013c9c479e43b731958bc64 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev235-1.25.0.jar.sha1 b/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev235-1.25.0.jar.sha1 new file mode 100644 index 0000000000000..f79af846281de --- /dev/null +++ b/plugins/discovery-gce/licenses/google-api-services-compute-v1-rev235-1.25.0.jar.sha1 @@ -0,0 +1 @@ +67bf1ac84286b4f9ea996a90f6e91e36dc648aff \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-http-client-1.23.0.jar.sha1 b/plugins/discovery-gce/licenses/google-http-client-1.23.0.jar.sha1 deleted file mode 100644 index 5526275d5a15f..0000000000000 --- a/plugins/discovery-gce/licenses/google-http-client-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e86c84ff3c98eca6423e97780325b299133d858 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-http-client-1.44.1.jar.sha1 b/plugins/discovery-gce/licenses/google-http-client-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..501f268254fbc --- /dev/null +++ b/plugins/discovery-gce/licenses/google-http-client-1.44.1.jar.sha1 @@ -0,0 +1 @@ +d8956bacb8a4011365fa15a690482c49a70c78c5 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-http-client-gson-1.44.1.jar.sha1 b/plugins/discovery-gce/licenses/google-http-client-gson-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..90ddf3ddc5ee6 --- /dev/null +++ b/plugins/discovery-gce/licenses/google-http-client-gson-1.44.1.jar.sha1 @@ -0,0 +1 @@ +f3b8967c6f7078da6380687859d0873105f84d39 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-http-client-jackson2-1.23.0.jar.sha1 b/plugins/discovery-gce/licenses/google-http-client-jackson2-1.23.0.jar.sha1 deleted file mode 100644 index 510856a517f04..0000000000000 --- a/plugins/discovery-gce/licenses/google-http-client-jackson2-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -fd6761f4046a8cb0455e6fa5f58e12b061e9826e \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/google-http-client-jackson2-1.44.1.jar.sha1 b/plugins/discovery-gce/licenses/google-http-client-jackson2-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..4472ffbbebe1c --- /dev/null +++ b/plugins/discovery-gce/licenses/google-http-client-jackson2-1.44.1.jar.sha1 @@ -0,0 +1 @@ +3f1947de0fd9eb250af16abe6103c11e68d11635 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 b/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 new file mode 100644 index 0000000000000..8b320fdd2f9cc --- /dev/null +++ b/plugins/discovery-gce/licenses/grpc-api-1.57.2.jar.sha1 @@ -0,0 +1 @@ +c71a006b81ddae7bc4b7cb1d2da78c1b173761f4 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/grpc-api-LICENSE.txt b/plugins/discovery-gce/licenses/grpc-api-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/discovery-gce/licenses/grpc-api-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
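For context on the build script change earlier in this patch: the discovery-gce upgrade both widens the thirdPartyAudit exclusions (the new transitive Guava and OpenCensus jars reach into sun.misc.Unsafe and reference optional servlet/JMS classes) and maps every opencensus-* jar to the single opencensus license pair added here. A minimal sketch of the two Gradle blocks involved, assuming the standard OpenSearch plugin build conventions are applied; it is illustrative, reuses a subset of the class names from the diff above, and is not itself part of the patch:

    // Group every opencensus-* jar under one licenses/opencensus-LICENSE.txt
    // and opencensus-NOTICE.txt pair instead of per-artifact copies.
    tasks.named("dependencyLicenses").configure {
        mapping from: /opencensus.*/, to: 'opencensus'
    }

    thirdPartyAudit {
        // Classes that are present on the classpath but deliberately use JDK internals (sun.misc.Unsafe).
        ignoreViolations(
            'com.google.common.hash.Striped64'
        )
        // Classes referenced by the dependencies but intentionally absent at runtime.
        ignoreMissingClasses(
            'javax.jms.Message',
            'javax.servlet.ServletContextEvent'
        )
    }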
diff --git a/plugins/discovery-gce/licenses/grpc-api-NOTICE.txt b/plugins/discovery-gce/licenses/grpc-api-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/discovery-gce/licenses/guava-32.1.1-jre.jar.sha1 b/plugins/discovery-gce/licenses/guava-32.1.1-jre.jar.sha1 new file mode 100644 index 0000000000000..0d791b5d3f55b --- /dev/null +++ b/plugins/discovery-gce/licenses/guava-32.1.1-jre.jar.sha1 @@ -0,0 +1 @@ +ad575652d84153075dd41ec6177ccb15251262b2 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/guava-LICENSE.txt b/plugins/discovery-gce/licenses/guava-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/discovery-gce/licenses/guava-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/plugins/discovery-gce/licenses/guava-NOTICE.txt b/plugins/discovery-gce/licenses/guava-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/discovery-gce/licenses/opencensus-LICENSE.txt b/plugins/discovery-gce/licenses/opencensus-LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/plugins/discovery-gce/licenses/opencensus-LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). 
+ + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
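A note on the version handling that follows: the repository-gcs build script below stops hard-coding the google-http-client version and reads it from a shared versions.google_http_client property, so discovery-gce and repository-gcs move to 1.44.1 together. A minimal sketch of that pattern, assuming the property is declared in a central ext block (the real OpenSearch build keeps its dependency versions in a shared catalog, so the declaration site shown here is illustrative only):

    // Illustrative declaration site; the value corresponds to the
    // google-http-client-1.44.1 jars whose sha1 files are added above and below.
    ext {
        versions = [google_http_client: '1.44.1']
    }

    dependencies {
        // Every google-http-client artifact tracks the same property, so a future
        // bump touches one line instead of each coordinate.
        api "com.google.http-client:google-http-client:${versions.google_http_client}"
        api "com.google.http-client:google-http-client-gson:${versions.google_http_client}"
    }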
diff --git a/plugins/discovery-gce/licenses/opencensus-NOTICE.txt b/plugins/discovery-gce/licenses/opencensus-NOTICE.txt new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/plugins/discovery-gce/licenses/opencensus-api-0.31.1.jar.sha1 b/plugins/discovery-gce/licenses/opencensus-api-0.31.1.jar.sha1 new file mode 100644 index 0000000000000..03760848f76ef --- /dev/null +++ b/plugins/discovery-gce/licenses/opencensus-api-0.31.1.jar.sha1 @@ -0,0 +1 @@ +66a60c7201c2b8b20ce495f0295b32bb0ccbbc57 \ No newline at end of file diff --git a/plugins/discovery-gce/licenses/opencensus-contrib-http-util-0.31.1.jar.sha1 b/plugins/discovery-gce/licenses/opencensus-contrib-http-util-0.31.1.jar.sha1 new file mode 100644 index 0000000000000..4e123da3ab45f --- /dev/null +++ b/plugins/discovery-gce/licenses/opencensus-contrib-http-util-0.31.1.jar.sha1 @@ -0,0 +1 @@ +3c13fc5715231fadb16a9b74a44d9d59c460cfa8 \ No newline at end of file diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle index 1dfc64e19601c..c4b1ab8d6875e 100644 --- a/plugins/repository-gcs/build.gradle +++ b/plugins/repository-gcs/build.gradle @@ -75,10 +75,10 @@ dependencies { runtimeOnly "com.google.guava:guava:${versions.guava}" api 'com.google.guava:failureaccess:1.0.1' - api 'com.google.http-client:google-http-client:1.43.3' - api 'com.google.http-client:google-http-client-appengine:1.43.3' - api 'com.google.http-client:google-http-client-gson:1.43.3' - api 'com.google.http-client:google-http-client-jackson2:1.44.1' + api "com.google.http-client:google-http-client:${versions.google_http_client}" + api "com.google.http-client:google-http-client-appengine:${versions.google_http_client}" + api "com.google.http-client:google-http-client-gson:${versions.google_http_client}" + api "com.google.http-client:google-http-client-jackson2:${versions.google_http_client}" api 'com.google.oauth-client:google-oauth-client:1.34.1' diff --git a/plugins/repository-gcs/licenses/google-http-client-1.43.3.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.43.3.jar.sha1 deleted file mode 100644 index 800467de8bdf3..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-1.43.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a758b82e55a2f5f681e289c5ed384d3dbda6f3cd \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-1.44.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..501f268254fbc --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-1.44.1.jar.sha1 @@ -0,0 +1 @@ +d8956bacb8a4011365fa15a690482c49a70c78c5 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.43.3.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.43.3.jar.sha1 deleted file mode 100644 index 4adcca6a55902..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-appengine-1.43.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -09d6cbdde6ea3469a67601a811b4e83de3e68a79 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.44.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..7b27b165453cd --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-appengine-1.44.1.jar.sha1 @@ -0,0 +1 @@ +da4f9f691edb7a9f00cd806157a4990cb7e07711 \ No newline at end of file diff --git 
a/plugins/repository-gcs/licenses/google-http-client-gson-1.43.3.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-gson-1.43.3.jar.sha1 deleted file mode 100644 index 43f4fe4a127e1..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-gson-1.43.3.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -252e267acf720ef6333488740a696a1d5e204639 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-gson-1.44.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-gson-1.44.1.jar.sha1 new file mode 100644 index 0000000000000..90ddf3ddc5ee6 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-gson-1.44.1.jar.sha1 @@ -0,0 +1 @@ +f3b8967c6f7078da6380687859d0873105f84d39 \ No newline at end of file From 84679dea01cdae9d50bd9cd6b8c39062df958d40 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Wed, 17 Apr 2024 14:12:14 -0400 Subject: [PATCH 04/15] Snapshot _status API to return correct status for partial snapshots (update version) (#13262) Signed-off-by: Andriy Redko --- .../main/java/org/opensearch/cluster/SnapshotsInProgress.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java index 8dbdcaa541734..d658f38430dd9 100644 --- a/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/opensearch/cluster/SnapshotsInProgress.java @@ -747,7 +747,7 @@ public void writeTo(StreamOutput out) throws IOException { snapshot.writeTo(out); out.writeBoolean(includeGlobalState); out.writeBoolean(partial); - if ((out.getVersion().before(Version.V_3_0_0)) && state == State.PARTIAL) { + if ((out.getVersion().before(Version.V_2_14_0)) && state == State.PARTIAL) { // Setting to SUCCESS for partial snapshots in older versions to maintain backward compatibility out.writeByte(State.SUCCESS.value()); } else { From 61ff5f8d53bbb5790466544125553bff9f0c8ccc Mon Sep 17 00:00:00 2001 From: Lakshya Taragi <157457166+ltaragi@users.noreply.github.com> Date: Thu, 18 Apr 2024 17:03:35 +0530 Subject: [PATCH 05/15] Fix flakiness in testDontAllowSwitchingCompatibilityModeForClusterWithMultipleVersions (#13281) Signed-off-by: Lakshya Taragi --- .../TransportClusterManagerNodeActionTests.java | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java b/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java index b3c58164fccbb..b3eb2443fa940 100644 --- a/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java +++ b/server/src/test/java/org/opensearch/action/support/clustermanager/TransportClusterManagerNodeActionTests.java @@ -91,8 +91,6 @@ import static org.opensearch.node.remotestore.RemoteStoreNodeService.REMOTE_STORE_COMPATIBILITY_MODE_SETTING; import static org.opensearch.test.ClusterServiceUtils.createClusterService; import static org.opensearch.test.ClusterServiceUtils.setState; -import static org.opensearch.test.VersionUtils.randomCompatibleVersion; -import static org.opensearch.test.VersionUtils.randomOpenSearchVersion; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -826,10 +824,9 @@ public void 
testDontAllowSwitchingCompatibilityModeForClusterWithMultipleVersion request.persistentSettings(intendedCompatibilityModeSettings); // two different but compatible open search versions for the discovery nodes - final Version version1 = randomOpenSearchVersion(random()); - final Version version2 = randomCompatibleVersion(random(), version1); + final Version version1 = Version.V_2_13_0; + final Version version2 = Version.V_2_13_1; - assert version1.equals(version2) == false : "current nodes in the cluster must be of different versions"; DiscoveryNode discoveryNode1 = new DiscoveryNode( UUIDs.base64UUID(), buildNewFakeTransportAddress(), From cdb57fa475cc622c7255c045cd26725bd69960a0 Mon Sep 17 00:00:00 2001 From: gaobinlong Date: Thu, 18 Apr 2024 19:42:43 +0800 Subject: [PATCH 06/15] Update supported version in yaml test file for the primary_only parameter in force-merge API (#13279) * Update supported version for the primary_only parameter in force-merge API Signed-off-by: Gao Binlong * Update reason to make it consitent with 2.x Signed-off-by: Gao Binlong --------- Signed-off-by: Gao Binlong --- .../rest-api-spec/test/indices.forcemerge/10_basic.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml index 39fb1604d9596..7410e020e1a91 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.forcemerge/10_basic.yml @@ -31,8 +31,8 @@ --- "Test primary_only parameter": - skip: - version: " - 2.99.99" - reason: "primary_only is available in 3.0+" + version: " - 2.12.99" + reason: "primary_only is available in 2.13.0+" - do: indices.create: From c4843c9bd792f1e2017319a4b20c12b1b3448378 Mon Sep 17 00:00:00 2001 From: Mohit Godwani <81609427+mgodwan@users.noreply.github.com> Date: Thu, 18 Apr 2024 18:25:01 +0530 Subject: [PATCH 07/15] Add release notes for 1.3.16 (#13280) Signed-off-by: mgodwan --- release-notes/opensearch.release-notes-1.3.16.md | 4 ++++ 1 file changed, 4 insertions(+) create mode 100644 release-notes/opensearch.release-notes-1.3.16.md diff --git a/release-notes/opensearch.release-notes-1.3.16.md b/release-notes/opensearch.release-notes-1.3.16.md new file mode 100644 index 0000000000000..a434e419d5780 --- /dev/null +++ b/release-notes/opensearch.release-notes-1.3.16.md @@ -0,0 +1,4 @@ +## 2024-04-18 Version 1.3.16 Release Notes + +### Upgrades +- Bump `netty` from 4.1.107.Final to 4.1.109.Final ([#12924](https://github.com/opensearch-project/OpenSearch/pull/12924), [#13233](https://github.com/opensearch-project/OpenSearch/pull/13233)) From ba25c234d91e0ab948e3387dc87785d8e99fba9e Mon Sep 17 00:00:00 2001 From: Peter Nied Date: Thu, 18 Apr 2024 10:00:44 -0500 Subject: [PATCH 08/15] Disabling breaking changes check on main (#13283) Signed-off-by: Peter Nied --- .github/workflows/detect-breaking-change.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/detect-breaking-change.yml b/.github/workflows/detect-breaking-change.yml index 1913d070e8c24..e5d3fddbd36f5 100644 --- a/.github/workflows/detect-breaking-change.yml +++ b/.github/workflows/detect-breaking-change.yml @@ -1,6 +1,8 @@ name: "Detect Breaking Changes" on: - pull_request + pull_request: + branches-ignore: + - main # This branch represents a to-be-released version of OpenSearch where breaking 
changes are allowed jobs: detect-breaking-change: From f5c3ef9fa329df83083dc607ccdb74f5c65b3198 Mon Sep 17 00:00:00 2001 From: Vacha Shah Date: Thu, 18 Apr 2024 09:00:46 -0700 Subject: [PATCH 09/15] Replacing InboundMessage with NativeInboundMessage for deprecation (#13126) * Replacing InboundMessage with NativeInboundMessage for deprecation Signed-off-by: Vacha Shah * Removing InboundMessage class Signed-off-by: Vacha Shah --------- Signed-off-by: Vacha Shah --- .../transport/InboundAggregator.java | 7 +- .../opensearch/transport/InboundMessage.java | 108 ------------------ .../transport/NativeMessageHandler.java | 7 +- .../opensearch/transport/TcpTransport.java | 12 -- .../opensearch/transport/TransportLogger.java | 5 +- .../NativeInboundBytesHandler.java | 5 +- .../transport/InboundAggregatorTests.java | 13 ++- .../transport/InboundHandlerTests.java | 75 +++++++++--- .../transport/InboundPipelineTests.java | 3 +- .../transport/OutboundHandlerTests.java | 3 +- 10 files changed, 80 insertions(+), 158 deletions(-) delete mode 100644 server/src/main/java/org/opensearch/transport/InboundMessage.java diff --git a/server/src/main/java/org/opensearch/transport/InboundAggregator.java b/server/src/main/java/org/opensearch/transport/InboundAggregator.java index e894331f3b64e..f52875d880b4f 100644 --- a/server/src/main/java/org/opensearch/transport/InboundAggregator.java +++ b/server/src/main/java/org/opensearch/transport/InboundAggregator.java @@ -40,6 +40,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.core.common.bytes.CompositeBytesReference; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import java.io.IOException; import java.util.ArrayList; @@ -113,7 +114,7 @@ public void aggregate(ReleasableBytesReference content) { } } - public InboundMessage finishAggregation() throws IOException { + public NativeInboundMessage finishAggregation() throws IOException { ensureOpen(); final ReleasableBytesReference releasableContent; if (isFirstContent()) { @@ -127,7 +128,7 @@ public InboundMessage finishAggregation() throws IOException { } final BreakerControl breakerControl = new BreakerControl(circuitBreaker); - final InboundMessage aggregated = new InboundMessage(currentHeader, releasableContent, breakerControl); + final NativeInboundMessage aggregated = new NativeInboundMessage(currentHeader, releasableContent, breakerControl); boolean success = false; try { if (aggregated.getHeader().needsToReadVariableHeader()) { @@ -142,7 +143,7 @@ public InboundMessage finishAggregation() throws IOException { if (isShortCircuited()) { aggregated.close(); success = true; - return new InboundMessage(aggregated.getHeader(), aggregationException); + return new NativeInboundMessage(aggregated.getHeader(), aggregationException); } else { success = true; return aggregated; diff --git a/server/src/main/java/org/opensearch/transport/InboundMessage.java b/server/src/main/java/org/opensearch/transport/InboundMessage.java deleted file mode 100644 index 5c68257557061..0000000000000 --- a/server/src/main/java/org/opensearch/transport/InboundMessage.java +++ /dev/null @@ -1,108 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - */ - -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. 
See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -/* - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.transport; - -import org.opensearch.common.annotation.DeprecatedApi; -import org.opensearch.common.bytes.ReleasableBytesReference; -import org.opensearch.common.lease.Releasable; -import org.opensearch.core.common.io.stream.StreamInput; -import org.opensearch.transport.nativeprotocol.NativeInboundMessage; - -import java.io.IOException; - -/** - * Inbound data as a message - * This api is deprecated, please use {@link org.opensearch.transport.nativeprotocol.NativeInboundMessage} instead. - * @opensearch.api - */ -@DeprecatedApi(since = "2.14.0") -public class InboundMessage implements Releasable, ProtocolInboundMessage { - - private final NativeInboundMessage nativeInboundMessage; - - public InboundMessage(Header header, ReleasableBytesReference content, Releasable breakerRelease) { - this.nativeInboundMessage = new NativeInboundMessage(header, content, breakerRelease); - } - - public InboundMessage(Header header, Exception exception) { - this.nativeInboundMessage = new NativeInboundMessage(header, exception); - } - - public InboundMessage(Header header, boolean isPing) { - this.nativeInboundMessage = new NativeInboundMessage(header, isPing); - } - - public Header getHeader() { - return this.nativeInboundMessage.getHeader(); - } - - public int getContentLength() { - return this.nativeInboundMessage.getContentLength(); - } - - public Exception getException() { - return this.nativeInboundMessage.getException(); - } - - public boolean isPing() { - return this.nativeInboundMessage.isPing(); - } - - public boolean isShortCircuit() { - return this.nativeInboundMessage.getException() != null; - } - - public Releasable takeBreakerReleaseControl() { - return this.nativeInboundMessage.takeBreakerReleaseControl(); - } - - public StreamInput openOrGetStreamInput() throws IOException { - return this.nativeInboundMessage.openOrGetStreamInput(); - } - - @Override - public void close() { - this.nativeInboundMessage.close(); - } - - @Override - public String toString() { - return this.nativeInboundMessage.toString(); - } - - @Override - public String getProtocol() { - return this.nativeInboundMessage.getProtocol(); - } - -} diff --git a/server/src/main/java/org/opensearch/transport/NativeMessageHandler.java b/server/src/main/java/org/opensearch/transport/NativeMessageHandler.java index 861b95a8098f2..c5b65f9eb7a11 100644 --- a/server/src/main/java/org/opensearch/transport/NativeMessageHandler.java +++ b/server/src/main/java/org/opensearch/transport/NativeMessageHandler.java @@ -51,6 +51,7 @@ import org.opensearch.telemetry.tracing.Tracer; import org.opensearch.telemetry.tracing.channels.TraceableTcpTransportChannel; import org.opensearch.threadpool.ThreadPool; +import 
org.opensearch.transport.nativeprotocol.NativeInboundMessage; import java.io.EOFException; import java.io.IOException; @@ -111,7 +112,7 @@ public void messageReceived( long slowLogThresholdMs, TransportMessageListener messageListener ) throws IOException { - InboundMessage inboundMessage = (InboundMessage) message; + NativeInboundMessage inboundMessage = (NativeInboundMessage) message; TransportLogger.logInboundMessage(channel, inboundMessage); if (inboundMessage.isPing()) { keepAlive.receiveKeepAlive(channel); @@ -122,7 +123,7 @@ public void messageReceived( private void handleMessage( TcpChannel channel, - InboundMessage message, + NativeInboundMessage message, long startTime, long slowLogThresholdMs, TransportMessageListener messageListener @@ -194,7 +195,7 @@ private Map> extractHeaders(Map heade private void handleRequest( TcpChannel channel, Header header, - InboundMessage message, + NativeInboundMessage message, TransportMessageListener messageListener ) throws IOException { final String action = header.getActionName(); diff --git a/server/src/main/java/org/opensearch/transport/TcpTransport.java b/server/src/main/java/org/opensearch/transport/TcpTransport.java index e32bba5e836d3..8ba0178577232 100644 --- a/server/src/main/java/org/opensearch/transport/TcpTransport.java +++ b/server/src/main/java/org/opensearch/transport/TcpTransport.java @@ -761,18 +761,6 @@ protected void serverAcceptedChannel(TcpChannel channel) { */ protected abstract void stopInternal(); - /** - * @deprecated use {@link #inboundMessage(TcpChannel, ProtocolInboundMessage)} - * Handles inbound message that has been decoded. - * - * @param channel the channel the message is from - * @param message the message - */ - @Deprecated(since = "2.14.0", forRemoval = true) - public void inboundMessage(TcpChannel channel, InboundMessage message) { - inboundMessage(channel, (ProtocolInboundMessage) message); - } - /** * Handles inbound message that has been decoded. 
* diff --git a/server/src/main/java/org/opensearch/transport/TransportLogger.java b/server/src/main/java/org/opensearch/transport/TransportLogger.java index 997b3bb5ba18e..e780f643aafd7 100644 --- a/server/src/main/java/org/opensearch/transport/TransportLogger.java +++ b/server/src/main/java/org/opensearch/transport/TransportLogger.java @@ -40,6 +40,7 @@ import org.opensearch.core.common.io.stream.InputStreamStreamInput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.compress.CompressorRegistry; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import java.io.IOException; @@ -64,7 +65,7 @@ static void logInboundMessage(TcpChannel channel, BytesReference message) { } } - static void logInboundMessage(TcpChannel channel, InboundMessage message) { + static void logInboundMessage(TcpChannel channel, NativeInboundMessage message) { if (logger.isTraceEnabled()) { try { String logMessage = format(channel, message, "READ"); @@ -136,7 +137,7 @@ private static String format(TcpChannel channel, BytesReference message, String return sb.toString(); } - private static String format(TcpChannel channel, InboundMessage message, String event) throws IOException { + private static String format(TcpChannel channel, NativeInboundMessage message, String event) throws IOException { final StringBuilder sb = new StringBuilder(); sb.append(channel); diff --git a/server/src/main/java/org/opensearch/transport/nativeprotocol/NativeInboundBytesHandler.java b/server/src/main/java/org/opensearch/transport/nativeprotocol/NativeInboundBytesHandler.java index a8a4c0da7ec0f..97981aeb6736e 100644 --- a/server/src/main/java/org/opensearch/transport/nativeprotocol/NativeInboundBytesHandler.java +++ b/server/src/main/java/org/opensearch/transport/nativeprotocol/NativeInboundBytesHandler.java @@ -16,7 +16,6 @@ import org.opensearch.transport.InboundAggregator; import org.opensearch.transport.InboundBytesHandler; import org.opensearch.transport.InboundDecoder; -import org.opensearch.transport.InboundMessage; import org.opensearch.transport.ProtocolInboundMessage; import org.opensearch.transport.StatsTracker; import org.opensearch.transport.TcpChannel; @@ -32,7 +31,7 @@ public class NativeInboundBytesHandler implements InboundBytesHandler { private static final ThreadLocal> fragmentList = ThreadLocal.withInitial(ArrayList::new); - private static final InboundMessage PING_MESSAGE = new InboundMessage(null, true); + private static final NativeInboundMessage PING_MESSAGE = new NativeInboundMessage(null, true); private final ArrayDeque pending; private final InboundDecoder decoder; @@ -152,7 +151,7 @@ private void forwardFragments( messageHandler.accept(channel, PING_MESSAGE); } else if (fragment == InboundDecoder.END_CONTENT) { assert aggregator.isAggregating(); - try (InboundMessage aggregated = aggregator.finishAggregation()) { + try (NativeInboundMessage aggregated = aggregator.finishAggregation()) { statsTracker.markMessageReceived(); messageHandler.accept(channel, aggregated); } diff --git a/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java b/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java index 2dd98a8efe2a3..4ac78366360d7 100644 --- a/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundAggregatorTests.java @@ -42,6 +42,7 @@ import org.opensearch.core.common.breaker.CircuitBreakingException; import org.opensearch.core.common.bytes.BytesArray; 
import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import org.junit.Before; import java.io.IOException; @@ -107,7 +108,7 @@ public void testInboundAggregation() throws IOException { } // Signal EOS - InboundMessage aggregated = aggregator.finishAggregation(); + NativeInboundMessage aggregated = aggregator.finishAggregation(); assertThat(aggregated, notNullValue()); assertFalse(aggregated.isPing()); @@ -138,7 +139,7 @@ public void testInboundUnknownAction() throws IOException { assertEquals(0, content.refCount()); // Signal EOS - InboundMessage aggregated = aggregator.finishAggregation(); + NativeInboundMessage aggregated = aggregator.finishAggregation(); assertThat(aggregated, notNullValue()); assertTrue(aggregated.isShortCircuit()); @@ -161,7 +162,7 @@ public void testCircuitBreak() throws IOException { content1.close(); // Signal EOS - InboundMessage aggregated1 = aggregator.finishAggregation(); + NativeInboundMessage aggregated1 = aggregator.finishAggregation(); assertEquals(0, content1.refCount()); assertThat(aggregated1, notNullValue()); @@ -180,7 +181,7 @@ public void testCircuitBreak() throws IOException { content2.close(); // Signal EOS - InboundMessage aggregated2 = aggregator.finishAggregation(); + NativeInboundMessage aggregated2 = aggregator.finishAggregation(); assertEquals(1, content2.refCount()); assertThat(aggregated2, notNullValue()); @@ -199,7 +200,7 @@ public void testCircuitBreak() throws IOException { content3.close(); // Signal EOS - InboundMessage aggregated3 = aggregator.finishAggregation(); + NativeInboundMessage aggregated3 = aggregator.finishAggregation(); assertEquals(1, content3.refCount()); assertThat(aggregated3, notNullValue()); @@ -263,7 +264,7 @@ public void testFinishAggregationWillFinishHeader() throws IOException { content.close(); // Signal EOS - InboundMessage aggregated = aggregator.finishAggregation(); + NativeInboundMessage aggregated = aggregator.finishAggregation(); assertThat(aggregated, notNullValue()); assertFalse(header.needsToReadVariableHeader()); diff --git a/server/src/test/java/org/opensearch/transport/InboundHandlerTests.java b/server/src/test/java/org/opensearch/transport/InboundHandlerTests.java index 0d171e17e70e1..2dde27d62e759 100644 --- a/server/src/test/java/org/opensearch/transport/InboundHandlerTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundHandlerTests.java @@ -56,6 +56,7 @@ import org.opensearch.test.VersionUtils; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import org.junit.After; import org.junit.Before; @@ -142,7 +143,7 @@ public void testPing() throws Exception { ); requestHandlers.registerHandler(registry); - handler.inboundMessage(channel, new InboundMessage(null, true)); + handler.inboundMessage(channel, new NativeInboundMessage(null, true)); if (channel.isServerChannel()) { BytesReference ping = channel.getMessageCaptor().get(); assertEquals('E', ping.get(0)); @@ -208,7 +209,11 @@ public TestResponse read(StreamInput in) throws IOException { BytesReference fullRequestBytes = request.serialize(new BytesStreamOutput()); BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize); Header requestHeader = new Header(fullRequestBytes.length() - 6, requestId, TransportStatus.setRequest((byte) 0), version); - InboundMessage requestMessage = new InboundMessage(requestHeader, 
ReleasableBytesReference.wrap(requestContent), () -> {}); + NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(requestContent), + () -> {} + ); requestHeader.finishParsingHeader(requestMessage.openOrGetStreamInput()); handler.inboundMessage(channel, requestMessage); @@ -229,7 +234,11 @@ public TestResponse read(StreamInput in) throws IOException { BytesReference fullResponseBytes = channel.getMessageCaptor().get(); BytesReference responseContent = fullResponseBytes.slice(headerSize, fullResponseBytes.length() - headerSize); Header responseHeader = new Header(fullResponseBytes.length() - 6, requestId, responseStatus, version); - InboundMessage responseMessage = new InboundMessage(responseHeader, ReleasableBytesReference.wrap(responseContent), () -> {}); + NativeInboundMessage responseMessage = new NativeInboundMessage( + responseHeader, + ReleasableBytesReference.wrap(responseContent), + () -> {} + ); responseHeader.finishParsingHeader(responseMessage.openOrGetStreamInput()); handler.inboundMessage(channel, responseMessage); @@ -256,7 +265,7 @@ public void testSendsErrorResponseToHandshakeFromCompatibleVersion() throws Exce TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), remoteVersion ); - final InboundMessage requestMessage = unreadableInboundHandshake(remoteVersion, requestHeader); + final NativeInboundMessage requestMessage = unreadableInboundHandshake(remoteVersion, requestHeader); requestHeader.actionName = TransportHandshaker.HANDSHAKE_ACTION_NAME; requestHeader.headers = Tuple.tuple(Map.of(), Map.of()); requestHeader.features = Set.of(); @@ -296,7 +305,7 @@ public void testClosesChannelOnErrorInHandshakeWithIncompatibleVersion() throws TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), remoteVersion ); - final InboundMessage requestMessage = unreadableInboundHandshake(remoteVersion, requestHeader); + final NativeInboundMessage requestMessage = unreadableInboundHandshake(remoteVersion, requestHeader); requestHeader.actionName = TransportHandshaker.HANDSHAKE_ACTION_NAME; requestHeader.headers = Tuple.tuple(Map.of(), Map.of()); requestHeader.features = Set.of(); @@ -327,13 +336,17 @@ public void testLogsSlowInboundProcessing() throws Exception { TransportStatus.setRequest(TransportStatus.setHandshake((byte) 0)), remoteVersion ); - final InboundMessage requestMessage = new InboundMessage(requestHeader, ReleasableBytesReference.wrap(BytesArray.EMPTY), () -> { - try { - TimeUnit.SECONDS.sleep(1L); - } catch (InterruptedException e) { - throw new AssertionError(e); + final NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(BytesArray.EMPTY), + () -> { + try { + TimeUnit.SECONDS.sleep(1L); + } catch (InterruptedException e) { + throw new AssertionError(e); + } } - }); + ); requestHeader.actionName = TransportHandshaker.HANDSHAKE_ACTION_NAME; requestHeader.headers = Tuple.tuple(Collections.emptyMap(), Collections.emptyMap()); requestHeader.features = Set.of(); @@ -407,7 +420,11 @@ public void onResponseSent(long requestId, String action, Exception error) { BytesReference fullRequestBytes = BytesReference.fromByteBuffer((ByteBuffer) buffer.flip()); BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize); Header requestHeader = new Header(fullRequestBytes.length() - 6, requestId, TransportStatus.setRequest((byte) 0), version); - InboundMessage requestMessage = new 
InboundMessage(requestHeader, ReleasableBytesReference.wrap(requestContent), () -> {}); + NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(requestContent), + () -> {} + ); requestHeader.finishParsingHeader(requestMessage.openOrGetStreamInput()); handler.inboundMessage(channel, requestMessage); @@ -474,7 +491,11 @@ public void onResponseSent(long requestId, String action, Exception error) { // Create the request payload by intentionally stripping 1 byte away BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize - 1); Header requestHeader = new Header(fullRequestBytes.length() - 6, requestId, TransportStatus.setRequest((byte) 0), version); - InboundMessage requestMessage = new InboundMessage(requestHeader, ReleasableBytesReference.wrap(requestContent), () -> {}); + NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(requestContent), + () -> {} + ); requestHeader.finishParsingHeader(requestMessage.openOrGetStreamInput()); handler.inboundMessage(channel, requestMessage); @@ -540,7 +561,11 @@ public TestResponse read(StreamInput in) throws IOException { BytesReference fullRequestBytes = request.serialize(new BytesStreamOutput()); BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize); Header requestHeader = new Header(fullRequestBytes.length() - 6, requestId, TransportStatus.setRequest((byte) 0), version); - InboundMessage requestMessage = new InboundMessage(requestHeader, ReleasableBytesReference.wrap(requestContent), () -> {}); + NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(requestContent), + () -> {} + ); requestHeader.finishParsingHeader(requestMessage.openOrGetStreamInput()); handler.inboundMessage(channel, requestMessage); @@ -562,7 +587,11 @@ public TestResponse read(StreamInput in) throws IOException { BytesReference fullResponseBytes = BytesReference.fromByteBuffer((ByteBuffer) buffer.flip()); BytesReference responseContent = fullResponseBytes.slice(headerSize, fullResponseBytes.length() - headerSize); Header responseHeader = new Header(fullResponseBytes.length() - 6, requestId, responseStatus, version); - InboundMessage responseMessage = new InboundMessage(responseHeader, ReleasableBytesReference.wrap(responseContent), () -> {}); + NativeInboundMessage responseMessage = new NativeInboundMessage( + responseHeader, + ReleasableBytesReference.wrap(responseContent), + () -> {} + ); responseHeader.finishParsingHeader(responseMessage.openOrGetStreamInput()); handler.inboundMessage(channel, responseMessage); @@ -628,7 +657,11 @@ public TestResponse read(StreamInput in) throws IOException { BytesReference fullRequestBytes = request.serialize(new BytesStreamOutput()); BytesReference requestContent = fullRequestBytes.slice(headerSize, fullRequestBytes.length() - headerSize); Header requestHeader = new Header(fullRequestBytes.length() - 6, requestId, TransportStatus.setRequest((byte) 0), version); - InboundMessage requestMessage = new InboundMessage(requestHeader, ReleasableBytesReference.wrap(requestContent), () -> {}); + NativeInboundMessage requestMessage = new NativeInboundMessage( + requestHeader, + ReleasableBytesReference.wrap(requestContent), + () -> {} + ); requestHeader.finishParsingHeader(requestMessage.openOrGetStreamInput()); handler.inboundMessage(channel, requestMessage); @@ -645,7 +678,11 
@@ public TestResponse read(StreamInput in) throws IOException { // Create the response payload by intentionally stripping 1 byte away BytesReference responseContent = fullResponseBytes.slice(headerSize, fullResponseBytes.length() - headerSize - 1); Header responseHeader = new Header(fullResponseBytes.length() - 6, requestId, responseStatus, version); - InboundMessage responseMessage = new InboundMessage(responseHeader, ReleasableBytesReference.wrap(responseContent), () -> {}); + NativeInboundMessage responseMessage = new NativeInboundMessage( + responseHeader, + ReleasableBytesReference.wrap(responseContent), + () -> {} + ); responseHeader.finishParsingHeader(responseMessage.openOrGetStreamInput()); handler.inboundMessage(channel, responseMessage); @@ -654,8 +691,8 @@ public TestResponse read(StreamInput in) throws IOException { assertThat(exceptionCaptor.get().getMessage(), containsString("Failed to deserialize response from handler")); } - private static InboundMessage unreadableInboundHandshake(Version remoteVersion, Header requestHeader) { - return new InboundMessage(requestHeader, ReleasableBytesReference.wrap(BytesArray.EMPTY), () -> {}) { + private static NativeInboundMessage unreadableInboundHandshake(Version remoteVersion, Header requestHeader) { + return new NativeInboundMessage(requestHeader, ReleasableBytesReference.wrap(BytesArray.EMPTY), () -> {}) { @Override public StreamInput openOrGetStreamInput() { final StreamInput streamInput = new InputStreamStreamInput(new InputStream() { diff --git a/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java b/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java index 2dfe8a0dd8590..d54f7e6fd2c2b 100644 --- a/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java +++ b/server/src/test/java/org/opensearch/transport/InboundPipelineTests.java @@ -49,6 +49,7 @@ import org.opensearch.core.common.bytes.BytesArray; import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import java.io.IOException; import java.util.ArrayList; @@ -74,7 +75,7 @@ public void testPipelineHandling() throws IOException { final List toRelease = new ArrayList<>(); final BiConsumer messageHandler = (c, m) -> { try { - InboundMessage message = (InboundMessage) m; + NativeInboundMessage message = (NativeInboundMessage) m; final Header header = message.getHeader(); final MessageData actualData; final Version version = header.getVersion(); diff --git a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java index 36ba409a2de03..ad7d4401af13c 100644 --- a/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java +++ b/server/src/test/java/org/opensearch/transport/OutboundHandlerTests.java @@ -53,6 +53,7 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.transport.nativeprotocol.NativeInboundMessage; import org.junit.After; import org.junit.Before; @@ -97,7 +98,7 @@ public void setUp() throws Exception { final InboundAggregator aggregator = new InboundAggregator(breaker, (Predicate) action -> true); pipeline = new InboundPipeline(statsTracker, millisSupplier, decoder, aggregator, (c, m) -> { try (BytesStreamOutput streamOutput = new BytesStreamOutput()) { - InboundMessage m1 = (InboundMessage) 
m; + NativeInboundMessage m1 = (NativeInboundMessage) m; Streams.copy(m1.openOrGetStreamInput(), streamOutput); message.set(new Tuple<>(m1.getHeader(), streamOutput.bytes())); } catch (IOException e) { From 0282e64bb25a8485526a1b6bf584ac2b9495f219 Mon Sep 17 00:00:00 2001 From: Daniel Widdis Date: Thu, 18 Apr 2024 23:05:39 -0700 Subject: [PATCH 10/15] Ignore BaseRestHandler unconsumed content check as it's always consumed (#13290) * Ignore BaseRestHandler unconsumed content check as it's always consumed Signed-off-by: Daniel Widdis * Remove comment, continue to ignore content on Force Merge Signed-off-by: Daniel Widdis * Remove no-body test from RestDeletePitActionTests Signed-off-by: Daniel Widdis --------- Signed-off-by: Daniel Widdis --- CHANGELOG.md | 1 + .../org/opensearch/rest/BaseRestHandler.java | 4 - .../forcemerge/RestForceMergeActionTests.java | 22 ------ .../opensearch/rest/BaseRestHandlerTests.java | 79 ------------------- .../search/pit/RestDeletePitActionTests.java | 25 ------ 5 files changed, 1 insertion(+), 130 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5efcfee3d9d9d..6d8af16db72a7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -59,6 +59,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Enabled mockTelemetryPlugin for IT and fixed OOM issues ([#13054](https://github.com/opensearch-project/OpenSearch/pull/13054)) - Fix implement mark() and markSupported() in class FilterStreamInput ([#13098](https://github.com/opensearch-project/OpenSearch/pull/13098)) - Fix snapshot _status API to return correct status for partial snapshots ([#12812](https://github.com/opensearch-project/OpenSearch/pull/12812)) +- Ignore BaseRestHandler unconsumed content check as it's always consumed. ([#13290](https://github.com/opensearch-project/OpenSearch/pull/13290)) ### Security diff --git a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java index 3552e32022b2c..fc150405747ec 100644 --- a/server/src/main/java/org/opensearch/rest/BaseRestHandler.java +++ b/server/src/main/java/org/opensearch/rest/BaseRestHandler.java @@ -122,10 +122,6 @@ public final void handleRequest(RestRequest request, RestChannel channel, NodeCl throw new IllegalArgumentException(unrecognized(request, unconsumedParams, candidateParams, "parameter")); } - if (request.hasContent() && request.isContentConsumed() == false) { - throw new IllegalArgumentException("request [" + request.method() + " " + request.path() + "] does not support having a body"); - } - usageCount.increment(); // execute the action action.accept(channel); diff --git a/server/src/test/java/org/opensearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java b/server/src/test/java/org/opensearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java index b09e592922ed9..01d72b78a679e 100644 --- a/server/src/test/java/org/opensearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java +++ b/server/src/test/java/org/opensearch/action/admin/indices/forcemerge/RestForceMergeActionTests.java @@ -32,14 +32,9 @@ package org.opensearch.action.admin.indices.forcemerge; -import org.opensearch.client.node.NodeClient; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.rest.RestRequest; import 
org.opensearch.rest.action.admin.indices.RestForceMergeAction; -import org.opensearch.test.rest.FakeRestChannel; import org.opensearch.test.rest.FakeRestRequest; import org.opensearch.test.rest.RestActionTestCase; import org.junit.Before; @@ -47,9 +42,6 @@ import java.util.HashMap; import java.util.Map; -import static org.hamcrest.Matchers.equalTo; -import static org.mockito.Mockito.mock; - public class RestForceMergeActionTests extends RestActionTestCase { @Before @@ -57,20 +49,6 @@ public void setUpAction() { controller().registerHandler(new RestForceMergeAction()); } - public void testBodyRejection() throws Exception { - final RestForceMergeAction handler = new RestForceMergeAction(); - String json = JsonXContent.contentBuilder().startObject().field("max_num_segments", 1).endObject().toString(); - final FakeRestRequest request = new FakeRestRequest.Builder(NamedXContentRegistry.EMPTY).withContent( - new BytesArray(json), - MediaTypeRegistry.JSON - ).withPath("/_forcemerge").build(); - IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> handler.handleRequest(request, new FakeRestChannel(request, randomBoolean(), 1), mock(NodeClient.class)) - ); - assertThat(e.getMessage(), equalTo("request [GET /_forcemerge] does not support having a body")); - } - public void testDeprecationMessage() { final Map params = new HashMap<>(); params.put("only_expunge_deletes", Boolean.TRUE.toString()); diff --git a/server/src/test/java/org/opensearch/rest/BaseRestHandlerTests.java b/server/src/test/java/org/opensearch/rest/BaseRestHandlerTests.java index ce929e64d8960..45653e9d8e4d6 100644 --- a/server/src/test/java/org/opensearch/rest/BaseRestHandlerTests.java +++ b/server/src/test/java/org/opensearch/rest/BaseRestHandlerTests.java @@ -35,10 +35,6 @@ import org.opensearch.client.node.NodeClient; import org.opensearch.common.Table; import org.opensearch.common.settings.Settings; -import org.opensearch.common.xcontent.json.JsonXContent; -import org.opensearch.core.common.bytes.BytesArray; -import org.opensearch.core.xcontent.MediaTypeRegistry; -import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.rest.RestHandler.ReplacedRoute; import org.opensearch.rest.RestHandler.Route; import org.opensearch.rest.RestRequest.Method; @@ -281,81 +277,6 @@ public String getName() { assertTrue(executed.get()); } - public void testConsumedBody() throws Exception { - final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler() { - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - request.content(); - return channel -> executed.set(true); - } - - @Override - public String getName() { - return "test_consumed_body"; - } - }; - - try (XContentBuilder builder = JsonXContent.contentBuilder().startObject().endObject()) { - final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( - new BytesArray(builder.toString()), - MediaTypeRegistry.JSON - ).build(); - final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); - handler.handleRequest(request, channel, mockClient); - assertTrue(executed.get()); - } - } - - public void testUnconsumedNoBody() throws Exception { - final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler() { - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - 
return channel -> executed.set(true); - } - - @Override - public String getName() { - return "test_unconsumed_body"; - } - }; - - final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).build(); - final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); - handler.handleRequest(request, channel, mockClient); - assertTrue(executed.get()); - } - - public void testUnconsumedBody() throws IOException { - final AtomicBoolean executed = new AtomicBoolean(); - final BaseRestHandler handler = new BaseRestHandler() { - @Override - protected RestChannelConsumer prepareRequest(final RestRequest request, final NodeClient client) throws IOException { - return channel -> executed.set(true); - } - - @Override - public String getName() { - return "test_unconsumed_body"; - } - }; - - try (XContentBuilder builder = JsonXContent.contentBuilder().startObject().endObject()) { - final RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( - new BytesArray(builder.toString()), - MediaTypeRegistry.JSON - ).build(); - final RestChannel channel = new FakeRestChannel(request, randomBoolean(), 1); - final IllegalArgumentException e = expectThrows( - IllegalArgumentException.class, - () -> handler.handleRequest(request, channel, mockClient) - ); - assertThat(e, hasToString(containsString("request [GET /] does not support having a body"))); - assertFalse(executed.get()); - } - } - public void testReplaceRoutesMethod() throws Exception { List routes = Arrays.asList(new Route(Method.GET, "/path/test"), new Route(Method.PUT, "/path2/test")); List replacedRoutes = RestHandler.replaceRoutes(routes, "/prefix", "/deprecatedPrefix"); diff --git a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java index b60541825e3ed..448ba9e5a8cd7 100644 --- a/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java +++ b/server/src/test/java/org/opensearch/search/pit/RestDeletePitActionTests.java @@ -82,31 +82,6 @@ public void deletePits(DeletePitRequest request, ActionListener pitCalled = new SetOnce<>(); - try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { - @Override - public void deletePits(DeletePitRequest request, ActionListener listener) { - pitCalled.set(true); - assertThat(request.getPitIds(), hasSize(1)); - assertThat(request.getPitIds().get(0), equalTo("_all")); - } - }) { - RestDeletePitAction action = new RestDeletePitAction(); - RestRequest request = new FakeRestRequest.Builder(xContentRegistry()).withContent( - new BytesArray("{\"pit_id\": [\"BODY\"]}"), - MediaTypeRegistry.JSON - ).withPath("/_all").build(); - FakeRestChannel channel = new FakeRestChannel(request, false, 0); - - IllegalArgumentException ex = expectThrows( - IllegalArgumentException.class, - () -> action.handleRequest(request, channel, nodeClient) - ); - assertTrue(ex.getMessage().contains("request [GET /_all] does not support having a body")); - } - } - public void testDeletePitQueryStringParamsShouldThrowException() { SetOnce pitCalled = new SetOnce<>(); try (NodeClient nodeClient = new NoOpNodeClient(this.getTestName()) { From b4692c897ec4b19161417147d29b116671616fe1 Mon Sep 17 00:00:00 2001 From: Andriy Redko Date: Fri, 19 Apr 2024 11:03:06 -0400 Subject: [PATCH 11/15] Reconsider the breaking changes check policy to detect breaking changes against released versions (#13292) * Reconsider the breaking changes check policy to detect breaking changes 
against released versions Signed-off-by: Andriy Redko * Add an ability to provide the target version to compare with for japicmp tasks Signed-off-by: Andriy Redko * Add documentation for japicmp tasks Signed-off-by: Andriy Redko --------- Signed-off-by: Andriy Redko --- CHANGELOG.md | 1 + DEVELOPER_GUIDE.md | 15 +++++++++++ server/build.gradle | 65 ++++++++++++++++++++++++++++----------------- 3 files changed, 56 insertions(+), 25 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 6d8af16db72a7..dadfcbfbd8b05 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,6 +45,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Improve built-in secure transports support ([#12907](https://github.com/opensearch-project/OpenSearch/pull/12907)) - Update links to documentation in rest-api-spec ([#13043](https://github.com/opensearch-project/OpenSearch/pull/13043)) - Refactoring globMatch using simpleMatchWithNormalizedStrings from Regex ([#13104](https://github.com/opensearch-project/OpenSearch/pull/13104)) +- [BWC and API enforcement] Reconsider the breaking changes check policy to detect breaking changes against released versions ([#13292](https://github.com/opensearch-project/OpenSearch/pull/13292)) ### Deprecated diff --git a/DEVELOPER_GUIDE.md b/DEVELOPER_GUIDE.md index f0851fc58d444..92ef71b92da7e 100644 --- a/DEVELOPER_GUIDE.md +++ b/DEVELOPER_GUIDE.md @@ -57,6 +57,7 @@ - [Developer API](#developer-api) - [User API](#user-api) - [Experimental Development](#experimental-development) + - [API Compatibility Checks](#api-compatibility-checks) - [Backports](#backports) - [LineLint](#linelint) - [Lucene Snapshots](#lucene-snapshots) @@ -607,6 +608,20 @@ a LTS feature but with additional guard rails and communication mechanisms to si release, or be removed altogether. Any Developer or User APIs implemented along with the experimental feature should be marked with `@ExperimentalApi` (or documented as `@opensearch.experimental`) annotation to signal the implementation is not subject to LTS and does not follow backwards compatibility guidelines. +#### API Compatibility Checks + +The compatibility checks for public APIs are performed using [japicmp](https://siom79.github.io/japicmp/) and are available as separate Gradle tasks (those are run on demand at the moment): + +``` +./gradlew japicmp +``` + +By default, the API compatibility checks are run against the latest released version of OpenSearch; however, the target version to compare against can be provided through a system property during the build, e.g.: + +``` +./gradlew japicmp -Djapicmp.compare.version=2.14.0-SNAPSHOT +``` + ### Backports The Github workflow in [`backport.yml`](.github/workflows/backport.yml) creates backport PRs automatically when the original PR with an appropriate label `backport ` is merged to main with the backport workflow run successfully on the PR. For example, if a PR on main needs to be backported to `1.x` branch, add a label `backport 1.x` to the PR and make sure the backport workflow runs on the PR along with other checks. Once this PR is merged to main, the workflow will create a backport PR to the `1.x` branch.
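For orientation, the japicmp task that the following server/build.gradle hunk configures can be sketched in isolation roughly as follows. This is only an illustrative sketch, not the project's actual build script: the task name `japicmpSketch` and the fallback version string are made up here, while the `JapicmpTask` properties used are the ones that appear in the diff below.

```
// Minimal sketch of a japicmp comparison task (illustrative only, not the project's real configuration).
// Assumes the me.champeau.gradle.japicmp plugin is on the build classpath.
def compareTarget = System.getProperty("japicmp.compare.version") ?: "2.13.0" // hypothetical fallback version
tasks.register("japicmpSketch", me.champeau.gradle.japicmp.JapicmpTask) {
    // Old API surface: a previously released jar; new API surface: the jar built by this project.
    oldClasspath.from(files("${buildDir}/japicmp-target/opensearch-${compareTarget}.jar"))
    newClasspath.from(tasks.named('jar'))
    // Report only classes that actually changed, and fail the build on incompatible modifications.
    onlyModified = true
    failOnModification = true
    txtOutputFile = layout.buildDirectory.file("reports/java-compatibility/report.txt")
}
```

The actual change below additionally resolves the newest released version from Maven metadata when no system property is supplied, and rewires the existing `japicmp` and download tasks accordingly.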
diff --git a/server/build.gradle b/server/build.gradle index a076a6bee36bf..fc383f940991c 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -173,6 +173,22 @@ tasks.named("testingConventions").configure { } } +// Set to current version by default +def japicmpCompareTarget = System.getProperty("japicmp.compare.version") +if (japicmpCompareTarget == null) { /* use latest released version */ + // Read the list from maven central. + // Fetch the metadata and parse the xml into Version instances, pick the latest one + japicmpCompareTarget = new URL('https://repo1.maven.org/maven2/org/opensearch/opensearch/maven-metadata.xml').openStream().withStream { s -> + new XmlParser().parse(s) + .versioning.versions.version + .collect { it.text() }.findAll { it ==~ /\d+\.\d+\.\d+/ } + .collect { org.opensearch.gradle.Version.fromString(it) } + .toSorted() + .last() + .toString() + } +} + def generateModulesList = tasks.register("generateModulesList") { List modules = project(':modules').subprojects.collect { it.name } File modulesFile = new File(buildDir, 'generated-resources/modules.txt') @@ -380,9 +396,10 @@ tasks.named("sourcesJar").configure { } } -/** Compares the current build against a snapshot build */ +/** Compares the current build against the latest released version or the version supplied through the 'japicmp.compare.version' system property */ tasks.register("japicmp", me.champeau.gradle.japicmp.JapicmpTask) { - oldClasspath.from(files("${buildDir}/snapshot/opensearch-${version}.jar")) + logger.info("Comparing public APIs from ${version} to ${japicmpCompareTarget}") + oldClasspath.from(files("${buildDir}/japicmp-target/opensearch-${japicmpCompareTarget}.jar")) newClasspath.from(tasks.named('jar')) onlyModified = true failOnModification = true @@ -390,50 +407,48 @@ tasks.register("japicmp", me.champeau.gradle.japicmp.JapicmpTask) { annotationIncludes = ['@org.opensearch.common.annotation.PublicApi', '@org.opensearch.common.annotation.DeprecatedApi'] txtOutputFile = layout.buildDirectory.file("reports/java-compatibility/report.txt") htmlOutputFile = layout.buildDirectory.file("reports/java-compatibility/report.html") - dependsOn downloadSnapshot + dependsOn downloadJapicmpCompareTarget } /** If the Java API Comparison task failed, print a hint if the change should be merged from its target branch */ gradle.taskGraph.afterTask { Task task, TaskState state -> if (task.name == 'japicmp' && state.failure != null) { - def sha = getGitShaFromJar("${buildDir}/snapshot/opensearch-${version}.jar") - logger.info("Incompatiable java api from snapshot jar built off of commit ${sha}") - - if (!inHistory(sha)) { - logger.warn('\u001B[33mPlease merge from the target branch and run this task again.\u001B[0m') - } + logger.info("Public API changes incompatible with the ${japicmpCompareTarget} target have been detected") } } -/** Downloads latest snapshot from maven repository */ -tasks.register("downloadSnapshot", Copy) { +/** Downloads latest released version from maven repository */ +tasks.register("downloadJapicmpCompareTarget", Copy) { def mavenSnapshotRepoUrl = "https://aws.oss.sonatype.org/content/repositories/snapshots/" def groupId = "org.opensearch" def artifactId = "opensearch" - def repos = project.getRepositories(); MavenArtifactRepository opensearchRepo = repos.maven(repo -> { repo.setName("opensearch-snapshots"); repo.setUrl(mavenSnapshotRepoUrl); }); - repos.exclusiveContent(exclusiveRepo -> { exclusiveRepo.filter(descriptor -> descriptor.includeGroup(groupId)); - 
exclusiveRepo.forRepositories(opensearchRepo); - }); + // Add repository for snapshot artifacts if japicmp compare target version is snapshot + if (japicmpCompareTarget.endsWith("-SNAPSHOT")) { + def repos = project.getRepositories(); + MavenArtifactRepository opensearchRepo = repos.maven(repo -> { + repo.setName("opensearch-snapshots"); + repo.setUrl(mavenSnapshotRepoUrl); + }); + + repos.exclusiveContent(exclusiveRepo -> { + exclusiveRepo.filter(descriptor -> descriptor.includeGroup(groupId)); + exclusiveRepo.forRepositories(opensearchRepo); + }); + } configurations { - snapshotArtifact { + japicmpCompareTargetArtifact { exclude group: 'org.apache.lucene' } } dependencies { - snapshotArtifact("${groupId}:${artifactId}:${version}:") + japicmpCompareTargetArtifact("${groupId}:${artifactId}:${japicmpCompareTarget}:") } - from configurations.snapshotArtifact - into "$buildDir/snapshot" + from configurations.japicmpCompareTargetArtifact + into "$buildDir/japicmp-target" } /** Check if the sha is in the current history */ From bfbdc9f43ebfcd55c0b2a5a96d9e2a73bd66ff04 Mon Sep 17 00:00:00 2001 From: Rishabh Maurya Date: Sun, 21 Apr 2024 12:32:47 -0700 Subject: [PATCH 12/15] Fix the flaky test for derived fields highlighter test (#13313) Signed-off-by: Rishabh Maurya --- .../DerivedFieldFetchAndHighlightTests.java | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/DerivedFieldFetchAndHighlightTests.java b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/DerivedFieldFetchAndHighlightTests.java index 28d97c74d9445..92127da9654aa 100644 --- a/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/DerivedFieldFetchAndHighlightTests.java +++ b/server/src/test/java/org/opensearch/search/fetch/subphase/highlight/DerivedFieldFetchAndHighlightTests.java @@ -144,17 +144,17 @@ public void testDerivedFieldFromIndexMapping() throws IOException { // create a fetch context to be used by HighlightPhase processor FetchContext fetchContext = mock(FetchContext.class); - when(fetchContext.mapperService()).thenReturn(mockShardContext.getMapperService()); + when(fetchContext.mapperService()).thenReturn(mapperService); when(fetchContext.getQueryShardContext()).thenReturn(mockShardContext); when(fetchContext.getIndexSettings()).thenReturn(indexService.getIndexSettings()); when(fetchContext.searcher()).thenReturn( new ContextIndexSearcher( - searcher.getIndexReader(), - searcher.getSimilarity(), - searcher.getQueryCache(), - searcher.getQueryCachingPolicy(), + reader, + IndexSearcher.getDefaultSimilarity(), + IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), true, - searcher.getExecutor(), + null, null ) ); @@ -253,17 +253,17 @@ public void testDerivedFieldFromSearchMapping() throws IOException { // create a fetch context to be used by HighlightPhase processor FetchContext fetchContext = mock(FetchContext.class); - when(fetchContext.mapperService()).thenReturn(mockShardContext.getMapperService()); + when(fetchContext.mapperService()).thenReturn(mapperService); when(fetchContext.getQueryShardContext()).thenReturn(mockShardContext); when(fetchContext.getIndexSettings()).thenReturn(indexService.getIndexSettings()); when(fetchContext.searcher()).thenReturn( new ContextIndexSearcher( - searcher.getIndexReader(), - searcher.getSimilarity(), - searcher.getQueryCache(), - searcher.getQueryCachingPolicy(), + reader, + IndexSearcher.getDefaultSimilarity(), + 
IndexSearcher.getDefaultQueryCache(), + IndexSearcher.getDefaultQueryCachingPolicy(), true, - searcher.getExecutor(), + null, null ) ); From 98009ed9b83d43377e6b3ffa610db7154ff7d200 Mon Sep 17 00:00:00 2001 From: Srikanth Padakanti Date: Mon, 22 Apr 2024 03:02:30 -0500 Subject: [PATCH 13/15] Improve error messages for `_stats` with closed indices (#13012) * Improve the error messages for _stats with closed indices Signed-off-by: srikanth padakanti * Edit the changelog text Signed-off-by: srikanth padakanti * correct the test cases Signed-off-by: srikanth padakanti * fix to throw appropriate error message Signed-off-by: srikanth padakanti * fix to throw appropriate error message Signed-off-by: srikanth padakanti --------- Signed-off-by: srikanth padakanti Signed-off-by: Srikanth Padakanti --- CHANGELOG.md | 1 + .../index/rankeval/RankEvalRequestIT.java | 2 +- .../metadata/IndexNameExpressionResolver.java | 8 +++++++- .../IndexNameExpressionResolverTests.java | 19 +++++++++++++++++++ 4 files changed, 28 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index dadfcbfbd8b05..0594cfea2c89d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -60,6 +60,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - Enabled mockTelemetryPlugin for IT and fixed OOM issues ([#13054](https://github.com/opensearch-project/OpenSearch/pull/13054)) - Fix implement mark() and markSupported() in class FilterStreamInput ([#13098](https://github.com/opensearch-project/OpenSearch/pull/13098)) - Fix snapshot _status API to return correct status for partial snapshots ([#12812](https://github.com/opensearch-project/OpenSearch/pull/12812)) +- Improve the error messages for _stats with closed indices ([#13012](https://github.com/opensearch-project/OpenSearch/pull/13012)) - Ignore BaseRestHandler unconsumed content check as it's always consumed. 
([#13290](https://github.com/opensearch-project/OpenSearch/pull/13290)) ### Security diff --git a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java index 488c2e33648e7..0e3db9d1c78b3 100644 --- a/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java +++ b/modules/rank-eval/src/internalClusterTest/java/org/opensearch/index/rankeval/RankEvalRequestIT.java @@ -345,7 +345,7 @@ public void testIndicesOptions() { request.indicesOptions(IndicesOptions.fromParameters("closed", null, null, "false", SearchRequest.DEFAULT_INDICES_OPTIONS)); response = client().execute(RankEvalAction.INSTANCE, request).actionGet(); assertEquals(1, response.getFailures().size()); - assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IndexClosedException.class)); + assertThat(response.getFailures().get("amsterdam_query"), instanceOf(IllegalArgumentException.class)); // test allow_no_indices request = new RankEvalRequest(task, new String[] { "bad*" }); diff --git a/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java b/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java index 9a3b569a7ac3d..24ff83d638d4b 100644 --- a/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java +++ b/server/src/main/java/org/opensearch/cluster/metadata/IndexNameExpressionResolver.java @@ -380,7 +380,13 @@ private void checkSystemIndexAccess(Context context, Metadata metadata, Set Date: Mon, 22 Apr 2024 18:07:57 +0530 Subject: [PATCH 14/15] [Remote Store] Throw IOException when remote is not in sync (#13282) Signed-off-by: Gaurav Bafna --- .../indices/create/RemoteCloneIndexIT.java | 26 +++++++--- .../RemotePrimaryRelocationIT.java | 51 +++++++++---------- .../opensearch/index/shard/IndexShard.java | 12 +++-- .../opensearch/index/shard/StoreRecovery.java | 15 ------ 4 files changed, 52 insertions(+), 52 deletions(-) diff --git a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java index f50e8fd0a38cf..98c2a3a1581b8 100644 --- a/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/action/admin/indices/create/RemoteCloneIndexIT.java @@ -40,13 +40,17 @@ */ import org.opensearch.Version; +import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; +import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.indices.settings.get.GetSettingsResponse; import org.opensearch.action.admin.indices.shrink.ResizeType; import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; +import org.opensearch.client.Requests; import org.opensearch.cluster.routing.allocation.decider.EnableAllocationDecider; import org.opensearch.common.settings.Settings; import org.opensearch.core.xcontent.MediaTypeRegistry; import org.opensearch.index.query.TermsQueryBuilder; +import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.remotestore.RemoteStoreBaseIntegTestCase; import org.opensearch.test.VersionUtils; @@ -156,7 +160,11 @@ public void testCreateCloneIndexFailure() throws ExecutionException, Interrupted 
client().admin() .cluster() .prepareUpdateSettings() - .setTransientSettings(Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none")) + .setTransientSettings( + Settings.builder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), "none") + .put(RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.getKey(), "10s") + ) .get(); try { setFailRate(REPOSITORY_NAME, 100); @@ -168,9 +176,14 @@ public void testCreateCloneIndexFailure() throws ExecutionException, Interrupted .setWaitForActiveShards(0) .setSettings(Settings.builder().put("index.number_of_replicas", 0).putNull("index.blocks.write").build()) .get(); - - Thread.sleep(2000); - ensureYellow("target"); + // waiting more than waitForRemoteStoreSync's sleep time of 30 sec to deterministically fail + Thread.sleep(40000); + ensureRed("target"); + ClusterHealthRequest healthRequest = Requests.clusterHealthRequest() + .waitForNoRelocatingShards(true) + .waitForNoInitializingShards(true); + ClusterHealthResponse actionGet = client().admin().cluster().health(healthRequest).actionGet(); + assertEquals(actionGet.getUnassignedShards(), numPrimaryShards); } catch (ExecutionException | InterruptedException e) { throw new RuntimeException(e); @@ -182,11 +195,12 @@ public void testCreateCloneIndexFailure() throws ExecutionException, Interrupted .cluster() .prepareUpdateSettings() .setTransientSettings( - Settings.builder().put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), (String) null) + Settings.builder() + .put(EnableAllocationDecider.CLUSTER_ROUTING_REBALANCE_ENABLE_SETTING.getKey(), (String) null) + .put(RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.getKey(), (String) null) ) .get(); } - } } diff --git a/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemotePrimaryRelocationIT.java b/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemotePrimaryRelocationIT.java index b1c429a45a1a1..4a4057def4207 100644 --- a/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemotePrimaryRelocationIT.java +++ b/server/src/internalClusterTest/java/org/opensearch/remotemigration/RemotePrimaryRelocationIT.java @@ -8,9 +8,8 @@ package org.opensearch.remotemigration; -import com.carrotsearch.randomizedtesting.generators.RandomNumbers; - import org.opensearch.action.DocWriteResponse; +import org.opensearch.action.admin.cluster.health.ClusterHealthRequest; import org.opensearch.action.admin.cluster.health.ClusterHealthResponse; import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesRequest; import org.opensearch.action.admin.cluster.repositories.get.GetRepositoriesResponse; @@ -18,11 +17,13 @@ import org.opensearch.action.delete.DeleteResponse; import org.opensearch.action.index.IndexResponse; import org.opensearch.client.Client; +import org.opensearch.client.Requests; import org.opensearch.cluster.routing.allocation.command.MoveAllocationCommand; import org.opensearch.common.Priority; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.index.query.QueryBuilders; +import org.opensearch.indices.recovery.RecoverySettings; import org.opensearch.plugins.Plugin; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.test.hamcrest.OpenSearchAssertions; @@ -162,12 +163,12 @@ public void testMixedModeRelocation_RemoteSeedingFail() throws Exception { String remoteNode = internalCluster().startNode(); 
internalCluster().validateClusterFormed(); - // assert repo gets registered - GetRepositoriesRequest gr = new GetRepositoriesRequest(new String[] { REPOSITORY_NAME }); - GetRepositoriesResponse getRepositoriesResponse = client.admin().cluster().getRepositories(gr).actionGet(); - assertEquals(1, getRepositoriesResponse.repositories().size()); - setFailRate(REPOSITORY_NAME, 100); + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.getKey(), "10s")) + .get(); logger.info("--> relocating from {} to {} ", docRepNode, remoteNode); client().admin().cluster().prepareReroute().add(new MoveAllocationCommand("test", 0, docRepNode, remoteNode)).execute().actionGet(); @@ -181,29 +182,23 @@ public void testMixedModeRelocation_RemoteSeedingFail() throws Exception { .actionGet(); assertTrue(clusterHealthResponse.getRelocatingShards() == 1); - setFailRate(REPOSITORY_NAME, 0); - Thread.sleep(RandomNumbers.randomIntBetween(random(), 0, 2000)); - clusterHealthResponse = client().admin() - .cluster() - .prepareHealth() - .setTimeout(TimeValue.timeValueSeconds(45)) - .setWaitForEvents(Priority.LANGUID) - .setWaitForNoRelocatingShards(true) - .execute() - .actionGet(); - assertTrue(clusterHealthResponse.getRelocatingShards() == 0); - logger.info("--> remote to remote relocation complete"); + // waiting more than waitForRemoteStoreSync's sleep time of 30 sec to deterministically fail + Thread.sleep(40000); + + ClusterHealthRequest healthRequest = Requests.clusterHealthRequest() + .waitForNoRelocatingShards(true) + .waitForNoInitializingShards(true); + ClusterHealthResponse actionGet = client().admin().cluster().health(healthRequest).actionGet(); + assertEquals(actionGet.getRelocatingShards(), 0); + assertEquals(docRepNode, primaryNodeName("test")); + finished.set(true); indexingThread.join(); - refresh("test"); - OpenSearchAssertions.assertHitCount(client().prepareSearch("test").setTrackTotalHits(true).get(), numAutoGenDocs.get()); - OpenSearchAssertions.assertHitCount( - client().prepareSearch("test") - .setTrackTotalHits(true)// extra paranoia ;) - .setQuery(QueryBuilders.termQuery("auto", true)) - .get(), - numAutoGenDocs.get() - ); + client().admin() + .cluster() + .prepareUpdateSettings() + .setTransientSettings(Settings.builder().put(RecoverySettings.INDICES_INTERNAL_REMOTE_UPLOAD_TIMEOUT.getKey(), (String) null)) + .get(); } private static Thread getIndexingThread(AtomicBoolean finished, AtomicInteger numAutoGenDocs) { diff --git a/server/src/main/java/org/opensearch/index/shard/IndexShard.java b/server/src/main/java/org/opensearch/index/shard/IndexShard.java index 26dbbbcdee7c0..72a67096fcd9e 100644 --- a/server/src/main/java/org/opensearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/opensearch/index/shard/IndexShard.java @@ -2118,15 +2118,16 @@ public boolean isRemoteSegmentStoreInSync() { return false; } - public void waitForRemoteStoreSync() { + public void waitForRemoteStoreSync() throws IOException { waitForRemoteStoreSync(() -> {}); } /* Blocks the calling thread, waiting for the remote store to get synced till internal Remote Upload Timeout Calls onProgress on seeing an increased file count on remote + Throws IOException if the remote store is not synced within the timeout */ - public void waitForRemoteStoreSync(Runnable onProgress) { + public void waitForRemoteStoreSync(Runnable onProgress) throws IOException { assert indexSettings.isAssignedOnRemoteNode(); 
RemoteSegmentStoreDirectory directory = getRemoteDirectory(); int segmentUploadeCount = 0; @@ -2138,7 +2139,7 @@ public void waitForRemoteStoreSync(Runnable onProgress) { while (System.nanoTime() - startNanos < getRecoverySettings().internalRemoteUploadTimeout().nanos()) { try { if (isRemoteSegmentStoreInSync()) { - break; + return; } else { if (directory.getSegmentsUploadedToRemoteStore().size() > segmentUploadeCount) { onProgress.run(); @@ -2156,6 +2157,11 @@ public void waitForRemoteStoreSync(Runnable onProgress) { return; } } + throw new IOException( + "Failed to upload to remote segment store within remote upload timeout of " + + getRecoverySettings().internalRemoteUploadTimeout().getMinutes() + + " minutes" + ); } public void preRecovery() { diff --git a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java index f5e342d28fde1..e130f50e105d5 100644 --- a/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java +++ b/server/src/main/java/org/opensearch/index/shard/StoreRecovery.java @@ -193,13 +193,6 @@ void recoverFromLocalShards( indexShard.getEngine().forceMerge(false, -1, false, false, false, UUIDs.randomBase64UUID()); if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { indexShard.waitForRemoteStoreSync(); - if (indexShard.isRemoteSegmentStoreInSync() == false) { - throw new IndexShardRecoveryException( - indexShard.shardId(), - "failed to upload to remote", - new IOException("Failed to upload to remote segment store") - ); - } } return true; } catch (IOException ex) { @@ -436,10 +429,6 @@ void recoverFromSnapshotAndRemoteStore( indexShard.finalizeRecovery(); if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { indexShard.waitForRemoteStoreSync(); - if (indexShard.isRemoteSegmentStoreInSync() == false) { - listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store")); - return; - } } indexShard.postRecovery("restore done"); @@ -722,10 +711,6 @@ private void restore( indexShard.finalizeRecovery(); if (indexShard.isRemoteTranslogEnabled() && indexShard.shardRouting.primary()) { indexShard.waitForRemoteStoreSync(); - if (indexShard.isRemoteSegmentStoreInSync() == false) { - listener.onFailure(new IndexShardRestoreFailedException(shardId, "Failed to upload to remote segment store")); - return; - } } indexShard.postRecovery("restore done"); listener.onResponse(true); From 4e58cef69c00aa255c569bf23dc1ee552f208a64 Mon Sep 17 00:00:00 2001 From: Ankit Jain Date: Mon, 22 Apr 2024 18:31:11 +0530 Subject: [PATCH 15/15] Ignoring unavailable shards during search request execution with ignore_available parameter (#13298) Signed-off-by: Ankit Jain --- CHANGELOG.md | 1 + .../search/AbstractSearchAsyncAction.java | 6 ++++-- .../AbstractSearchAsyncActionTests.java | 19 +++++++++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0594cfea2c89d..98a696c62cfa5 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,6 +44,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), - [BWC and API enforcement] Enforcing the presence of API annotations at build time ([#12872](https://github.com/opensearch-project/OpenSearch/pull/12872)) - Improve built-in secure transports support ([#12907](https://github.com/opensearch-project/OpenSearch/pull/12907)) - Update links to documentation in rest-api-spec 
([#13043](https://github.com/opensearch-project/OpenSearch/pull/13043)) +- Ignoring unavailable shards during search request execution with ignore_available parameter ([#13298](https://github.com/opensearch-project/OpenSearch/pull/13298)) - Refactoring globMatch using simpleMatchWithNormalizedStrings from Regex ([#13104](https://github.com/opensearch-project/OpenSearch/pull/13104)) - [BWC and API enforcement] Reconsider the breaking changes check policy to detect breaking changes against released versions ([#13292](https://github.com/opensearch-project/OpenSearch/pull/13292)) diff --git a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java index 0520a4a7aecec..9bf4a4b1e18f1 100644 --- a/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java +++ b/server/src/main/java/org/opensearch/action/search/AbstractSearchAsyncAction.java @@ -425,8 +425,10 @@ public final void executeNextPhase(SearchPhase currentPhase, SearchPhase nextPha currentPhase.getName() ); } - onPhaseFailure(currentPhase, "Partial shards failure (" + discrepancy + " shards unavailable)", null); - return; + if (!request.indicesOptions().ignoreUnavailable()) { + onPhaseFailure(currentPhase, "Partial shards failure (" + discrepancy + " shards unavailable)", null); + return; + } } } if (logger.isTraceEnabled()) { diff --git a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java index 420289d3ff2e5..7dcbf213d6c9d 100644 --- a/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java +++ b/server/src/test/java/org/opensearch/action/search/AbstractSearchAsyncActionTests.java @@ -70,6 +70,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; @@ -442,6 +443,24 @@ public void testShardNotAvailableWithDisallowPartialFailures() { assertEquals(0, searchPhaseExecutionException.getSuppressed().length); } + public void testShardNotAvailableWithIgnoreUnavailable() { + SearchRequest searchRequest = new SearchRequest().allowPartialSearchResults(false) + .indicesOptions(new IndicesOptions(EnumSet.of(IndicesOptions.Option.IGNORE_UNAVAILABLE), IndicesOptions.WildcardStates.NONE)); + AtomicReference exception = new AtomicReference<>(); + ActionListener listener = ActionListener.wrap(response -> {}, exception::set); + int numShards = randomIntBetween(2, 10); + ArraySearchPhaseResults phaseResults = new ArraySearchPhaseResults<>(numShards); + AbstractSearchAsyncAction action = createAction(searchRequest, phaseResults, listener, false, new AtomicLong()); + // skip one to avoid the "all shards failed" failure. + SearchShardIterator skipIterator = new SearchShardIterator(null, null, Collections.emptyList(), null); + skipIterator.resetAndSkip(); + action.skipShard(skipIterator); + + // Validate no exception is thrown + action.executeNextPhase(action, createFetchSearchPhase()); + action.sendSearchResponse(InternalSearchResponse.empty(), phaseResults.results); + } + private static ArraySearchPhaseResults phaseResults( Set contextIds, List> nodeLookups,