
Commit

Merge remote-tracking branch 'es/main' into synthetic_source_ip_native_arrays
martijnvg committed Feb 21, 2025
2 parents 581fc2a + a895875 commit 2136120
Showing 48 changed files with 479 additions and 960 deletions.
5 changes: 5 additions & 0 deletions docs/changelog/122938.yaml
@@ -0,0 +1,5 @@
pr: 122938
summary: Fix geoip databases index access after system feature migration (again)
area: Ingest Node
type: bug
issues: []
6 changes: 6 additions & 0 deletions docs/changelog/123010.yaml
@@ -0,0 +1,6 @@
pr: 123010
summary: Hold store reference in `InternalEngine#performActionWithDirectoryReader(...)`
area: Engine
type: bug
issues:
- 122974
@@ -123,13 +123,19 @@ public void testGeoIpSystemFeaturesMigration() throws Exception {

// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex));

// and getting data streams
assertBusy(() -> testGetDatastreams());
} else {
// after the upgrade, but before the migration, Kibana should work
assertBusy(() -> testGetStarAsKibana(List.of("my-index-00001"), maybeSecurityIndex));

// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndex));

// and getting data streams
assertBusy(() -> testGetDatastreams());

// migrate the system features and give the cluster a moment to settle
Request migrateSystemFeatures = new Request("POST", "/_migration/system_features");
assertOK(client().performRequest(migrateSystemFeatures));
@@ -144,6 +150,9 @@ public void testGeoIpSystemFeaturesMigration() throws Exception {
// as should a normal get *
assertBusy(() -> testGetStar(List.of("my-index-00001"), maybeSecurityIndexReindexed));

// and getting data streams
assertBusy(() -> testGetDatastreams());

Request disableDownloader = new Request("PUT", "/_cluster/settings");
disableDownloader.setJsonEntity("""
{"persistent": {"ingest.geoip.downloader.enabled": false}}
@@ -257,4 +266,15 @@ private void testGetStarAsKibana(List<String> indexNames, @Nullable List<String>
Map<String, Object> map = responseAsMap(response);
assertThat(map.keySet(), is(new HashSet<>(indexNames)));
}

private void testGetDatastreams() throws IOException {
Request getStar = new Request("GET", "_data_stream");
getStar.setOptions(
RequestOptions.DEFAULT.toBuilder().setWarningsHandler(WarningsHandler.PERMISSIVE) // we don't care about warnings, just errors
);
Response response = client().performRequest(getStar);
assertOK(response);

// note: we don't actually care about the response, just that there was one and that it didn't error out on us
}
}
@@ -50,6 +50,7 @@
import org.elasticsearch.persistent.PersistentTasksCustomMetadata;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
import org.elasticsearch.threadpool.ThreadPool;
@@ -341,7 +342,7 @@ private String mockSearches(String databaseName, int firstChunk, int lastChunk)
}

SearchHits hits = SearchHits.unpooled(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f);
SearchResponse searchResponse = new SearchResponse(hits, null, null, false, null, null, 0, null, 1, 1, 0, 1L, null, null);
SearchResponse searchResponse = SearchResponseUtils.successfulResponse(hits);
toRelease.add(searchResponse::decRef);
@SuppressWarnings("unchecked")
ActionFuture<SearchResponse> actionFuture = mock(ActionFuture.class);
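Several files in this commit replace the long positional SearchResponse constructor with helpers from SearchResponseUtils, as in the hunk above. A rough sketch of the two helper forms used in this commit follows; the SearchResponseFixtures class and its method names are illustrative only, while the SearchResponseUtils calls mirror the ones visible in this diff.

import org.apache.lucene.search.TotalHits;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;

// Illustrative fixture class, not part of the commit.
class SearchResponseFixtures {

    // Simple case: a successful response wrapping the given hits, defaults everywhere else.
    static SearchResponse successful(SearchHits hits) {
        return SearchResponseUtils.successfulResponse(hits);
    }

    // Builder case: set a scroll id and shard counts instead of passing a long list of positional arguments.
    static SearchResponse scrollable(SearchHits hits, String scrollId) {
        return SearchResponseUtils.response(hits).scrollId(scrollId).shards(5, 4, 0).build();
    }

    // Callers remain responsible for releasing the response, as the tests above do via decRef().
    static void example() {
        SearchHits hits = SearchHits.unpooled(new SearchHit[0], new TotalHits(0, TotalHits.Relation.EQUAL_TO), 0f);
        SearchResponse response = successful(hits);
        try {
            // ... assertions against the response would go here ...
        } finally {
            response.decRef();
        }
    }
}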
@@ -68,6 +68,7 @@
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskManager;
@@ -574,22 +575,7 @@ protected RequestWrapper<?> buildRequest(Hit doc) {
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
0
);
SearchResponse searchResponse = new SearchResponse(
hits,
null,
null,
false,
false,
null,
1,
scrollId(),
5,
4,
0,
randomLong(),
null,
SearchResponse.Clusters.EMPTY
);
SearchResponse searchResponse = SearchResponseUtils.response(hits).scrollId(scrollId()).shards(5, 4, 0).build();
try {
client.lastSearch.get().listener.onResponse(searchResponse);

@@ -30,6 +30,7 @@
import org.elasticsearch.index.reindex.ScrollableHitSource;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.threadpool.TestThreadPool;
@@ -166,22 +167,7 @@ private SearchResponse createSearchResponse() {
new TotalHits(0, TotalHits.Relation.EQUAL_TO),
0
);
return new SearchResponse(
hits,
null,
null,
false,
false,
null,
1,
randomSimpleString(random(), 1, 10),
5,
4,
0,
randomLong(),
null,
SearchResponse.Clusters.EMPTY
);
return SearchResponseUtils.response(hits).scrollId(randomSimpleString(random(), 1, 10)).shards(5, 4, 0).build();
}

private void assertSameHits(List<? extends ScrollableHitSource.Hit> actual, SearchHit[] expected) {
36 changes: 33 additions & 3 deletions muted-tests.yml
@@ -314,9 +314,6 @@ tests:
issue: https://github.com/elastic/elasticsearch/issues/122913
- class: org.elasticsearch.xpack.search.AsyncSearchSecurityIT
issue: https://github.com/elastic/elasticsearch/issues/122940
- class: org.elasticsearch.action.admin.indices.create.ShrinkIndexIT
method: testShrinkIndexPrimaryTerm
issue: https://github.com/elastic/elasticsearch/issues/122974
- class: org.elasticsearch.test.apmintegration.TracesApmIT
method: testApmIntegration
issue: https://github.com/elastic/elasticsearch/issues/122129
@@ -365,6 +362,39 @@ tests:
- class: org.elasticsearch.xpack.esql.qa.mixed.MixedClusterEsqlSpecIT
method: test {fork.ForkWithCommonPrefilter SYNC}
issue: https://github.com/elastic/elasticsearch/issues/123109
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/to_string aggregate_metric_double}
issue: https://github.com/elastic/elasticsearch/issues/123116
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithCommonPrefilter}
issue: https://github.com/elastic/elasticsearch/issues/123117
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.SimpleFork}
issue: https://github.com/elastic/elasticsearch/issues/123118
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.FiveFork}
issue: https://github.com/elastic/elasticsearch/issues/123119
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithWhereSortDescAndLimit}
issue: https://github.com/elastic/elasticsearch/issues/123120
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/46_downsample/Render stats from downsampled index}
issue: https://github.com/elastic/elasticsearch/issues/123122
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_unsupported_types/unsupported}
issue: https://github.com/elastic/elasticsearch/issues/123123
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/render aggregate_metric_double when missing min and max}
issue: https://github.com/elastic/elasticsearch/issues/123124
- class: org.elasticsearch.index.mapper.extras.ScaledFloatFieldMapperTests
method: testBlockLoaderFromRowStrideReader
issue: https://github.com/elastic/elasticsearch/issues/123126
- class: org.elasticsearch.xpack.esql.qa.mixed.EsqlClientYamlIT
method: test {p0=esql/40_tsdb/render aggregate_metric_double when missing value}
issue: https://github.com/elastic/elasticsearch/issues/123130
- class: org.elasticsearch.xpack.esql.ccq.MultiClusterSpecIT
method: test {fork.ForkWithWhereSortAndLimit}
issue: https://github.com/elastic/elasticsearch/issues/123131

# Examples:
#
@@ -12,16 +12,13 @@
import org.apache.http.HttpEntity;
import org.apache.http.entity.ContentType;
import org.apache.http.nio.entity.NStringEntity;
import org.apache.lucene.search.TotalHits;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.action.admin.cluster.state.ClusterStateAction;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchShardsRequest;
import org.elasticsearch.action.search.SearchShardsResponse;
import org.elasticsearch.action.search.ShardSearchFailure;
import org.elasticsearch.action.search.TransportSearchAction;
import org.elasticsearch.action.search.TransportSearchShardsAction;
import org.elasticsearch.client.Request;
@@ -33,11 +30,11 @@
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.cluster.node.VersionInformation;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.EsExecutors;
import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.search.aggregations.InternalAggregations;
import org.elasticsearch.test.cluster.ElasticsearchCluster;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.ObjectPath;
@@ -102,21 +99,8 @@ private static MockTransportService startTransport(
EsExecutors.DIRECT_EXECUTOR_SERVICE,
SearchRequest::new,
(request, channel, task) -> {
var searchResponse = new SearchResponse(
SearchHits.empty(new TotalHits(0, TotalHits.Relation.EQUAL_TO), Float.NaN),
InternalAggregations.EMPTY,
null,
false,
null,
null,
1,
null,
1,
1,
0,
100,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
var searchResponse = SearchResponseUtils.successfulResponse(
SearchHits.empty(Lucene.TOTAL_HITS_EQUAL_TO_ZERO, Float.NaN)
);
try {
channel.sendResponse(searchResponse);
@@ -40,7 +40,7 @@ public class LogsIndexModeRollingUpgradeIT extends AbstractRollingUpgradeTestCas
.module("x-pack-aggregate-metric")
.module("x-pack-stack")
.setting("xpack.security.enabled", "false")
.setting("xpack.license.self_generated.type", "trial")
.setting("xpack.license.self_generated.type", initTestSeed().nextBoolean() ? "trial" : "basic")
// We upgrade from standard to logsdb, so we need to start with logsdb disabled,
// then later cluster.logsdb.enabled gets set to true and next rollover data stream is in logsdb mode.
.setting("cluster.logsdb.enabled", "false")
11 changes: 0 additions & 11 deletions rest-api-spec/src/main/resources/rest-api-spec/api/cat.help.json
@@ -18,17 +18,6 @@
]
}
]
},
"params":{
"help":{
"type":"boolean",
"description":"Return help information",
"default":false
},
"s":{
"type":"list",
"description":"Comma-separated list of column names or column aliases to sort by"
}
}
}
}
@@ -36,6 +36,14 @@
"type":"string",
"description":"a short version of the Accept header, e.g. json, yaml"
},
"local":{
"type":"boolean",
"description":"Return local information, do not retrieve the state from master node (default: false)"
},
"master_timeout":{
"type":"time",
"description":"Explicit operation timeout for connection to master node"
},
"bytes":{
"type":"enum",
"description":"The unit in which to display byte values",
10 changes: 10 additions & 0 deletions rest-api-spec/src/main/resources/rest-api-spec/api/cat.tasks.json
@@ -70,6 +70,16 @@
"type":"boolean",
"description":"Verbose mode. Display column headers",
"default":false
},
"timeout":{
"type":"time",
"default":"30s",
"description":"Period to wait for a response. If no response is received before the timeout expires, the request fails and returns an error."
},
"wait_for_completion":{
"type":"boolean",
"default":false,
"description":"If `true`, the request blocks until the task has completed."
}
}
}
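For illustration only, a request exercising the timeout and wait_for_completion parameters documented above might look like the following sketch, using the same low-level REST client Request API that appears in the tests in this commit; the CatTasksExample class and the client wiring are assumptions, not part of this change.

import java.io.IOException;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

// Illustrative only: exercises the parameters added to cat.tasks.json above.
class CatTasksExample {
    static Response listTasks(RestClient client) throws IOException {
        Request request = new Request("GET", "/_cat/tasks");
        request.addParameter("v", "true");                    // verbose mode: include column headers
        request.addParameter("timeout", "30s");               // fail the request if no response arrives within 30s
        request.addParameter("wait_for_completion", "false"); // do not block until the tasks complete
        return client.performRequest(request);
    }
}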
@@ -14,6 +14,7 @@
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.core.Nullable;
import org.elasticsearch.core.Tuple;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.indices.SystemIndices.SystemIndexAccessLevel;

@@ -158,7 +159,26 @@ public static boolean isIndexVisible(
if (indexAbstraction.isSystem()) {
// check if it is net new
if (resolver.getNetNewSystemIndexPredicate().test(indexAbstraction.getName())) {
return isSystemIndexVisible(resolver, indexAbstraction);
// don't give this code any particular credit for being *correct*. it's just trying to resolve a combination of
// issues in a way that happens to *work*. there's probably a better way of writing things such that this won't
// be necessary, but for the moment, it happens to be expedient to write things this way.

// unwrap the alias and re-run the function on the write index of the alias -- that is, the alias is visible if
// the concrete index that it refers to is visible
Index writeIndex = indexAbstraction.getWriteIndex();
if (writeIndex == null) {
return false;
} else {
return isIndexVisible(
expression,
selectorString,
writeIndex.getName(),
indicesOptions,
metadata,
resolver,
includeDataStreams
);
}
}
}

@@ -3470,7 +3470,9 @@ protected long getPreCommitSegmentGeneration() {
<T> T performActionWithDirectoryReader(SearcherScope scope, CheckedFunction<DirectoryReader, T, IOException> action)
throws EngineException {
assert scope == SearcherScope.INTERNAL : "performActionWithDirectoryReader(...) isn't prepared for external usage";
assert store.hasReferences();
if (store.tryIncRef() == false) {
throw new AlreadyClosedException(shardId + " store is closed", failedEngine.get());
}
try {
ReferenceManager<ElasticsearchDirectoryReader> referenceManager = getReferenceManager(scope);
ElasticsearchDirectoryReader acquire = referenceManager.acquire();
@@ -3486,6 +3488,8 @@ <T> T performActionWithDirectoryReader(SearcherScope scope, CheckedFunction<Dire
ensureOpen(ex); // throw EngineCloseException here if we are already closed
logger.error("failed to perform action with directory reader", ex);
throw new EngineException(shardId, "failed to perform action with directory reader", ex);
} finally {
store.decRef();
}
}
}
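The hunk above replaces an assertion with an explicit tryIncRef()/decRef() pair so the store cannot be closed while the directory reader is in use. A minimal stand-alone sketch of that guard pattern follows; RefCountedStore is a hypothetical stand-in, not Elasticsearch's Store API.

import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.Supplier;

// Hypothetical resource illustrating the acquire/release discipline used in the fix above.
class RefCountedStore {
    private final AtomicInteger refCount = new AtomicInteger(1); // 1 while open; drops to 0 once closed

    boolean tryIncRef() {
        while (true) {
            int current = refCount.get();
            if (current <= 0) {
                return false; // already closed, caller must not proceed
            }
            if (refCount.compareAndSet(current, current + 1)) {
                return true;
            }
        }
    }

    void decRef() {
        refCount.decrementAndGet();
    }

    <T> T performGuardedAction(Supplier<T> action) {
        if (tryIncRef() == false) {
            throw new IllegalStateException("store is closed");
        }
        try {
            return action.get(); // the resource is guaranteed to stay open for the duration
        } finally {
            decRef(); // mirrors the finally { store.decRef(); } added in the diff
        }
    }
}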
@@ -22,6 +22,7 @@
import org.elasticsearch.search.AbstractSearchTestCase;
import org.elasticsearch.search.SearchHit;
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.SearchResponseUtils;
import org.elasticsearch.search.builder.PointInTimeBuilder;
import org.elasticsearch.search.builder.SearchSourceBuilder;
import org.elasticsearch.search.collapse.CollapseBuilder;
@@ -173,22 +174,7 @@ public void testFailOneItemFailsEntirePhase() throws IOException {
@Override
void sendExecuteMultiSearch(MultiSearchRequest request, SearchTask task, ActionListener<MultiSearchResponse> listener) {
assertTrue(executedMultiSearch.compareAndSet(false, true));
SearchResponse searchResponse = new SearchResponse(
collapsedHits,
null,
null,
false,
null,
null,
1,
null,
1,
1,
0,
0,
ShardSearchFailure.EMPTY_ARRAY,
SearchResponse.Clusters.EMPTY
);
SearchResponse searchResponse = SearchResponseUtils.successfulResponse(collapsedHits);
ActionListener.respondAndRelease(
listener,
new MultiSearchResponse(