Revert "Fix line length offenders in the o.e.search package. (#36223)"
This reverts commit 5f3893f.
jpountz committed Dec 6, 2018
1 parent fcde585 commit a67c91e
Showing 146 changed files with 1,940 additions and 2,776 deletions.
89 changes: 89 additions & 0 deletions buildSrc/src/main/resources/checkstyle_suppressions.xml

Large diffs are not rendered by default.

@@ -138,9 +138,8 @@ public ScriptService(Settings settings, Map<String, ScriptEngine> engines, Map<S
this.contexts = Objects.requireNonNull(contexts);

if (Strings.hasLength(settings.get(DISABLE_DYNAMIC_SCRIPTING_SETTING))) {
throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with " +
"fine-grained script settings. \n Dynamic scripts can be enabled for all languages and all operations not " +
"using `script.disable_dynamic: false` in elasticsearch.yml");
throw new IllegalArgumentException(DISABLE_DYNAMIC_SCRIPTING_SETTING + " is not a supported setting, replace with fine-grained script settings. \n" +
"Dynamic scripts can be enabled for all languages and all operations not using `script.disable_dynamic: false` in elasticsearch.yml");
}

this.typesAllowed = TYPES_ALLOWED_SETTING.exists(settings) ? new HashSet<>() : null;
@@ -366,8 +365,7 @@ public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryTyp
// TODO: remove this try-catch completely, when all script engines have good exceptions!
throw good; // its already good
} catch (Exception exception) {
throw new GeneralScriptException("Failed to compile " + type + " script [" + id + "] using lang [" + lang + "]",
exception);
throw new GeneralScriptException("Failed to compile " + type + " script [" + id + "] using lang [" + lang + "]", exception);
}

// Since the cache key is the script content itself we don't need to
54 changes: 18 additions & 36 deletions server/src/main/java/org/elasticsearch/search/MultiValueMode.java
@@ -63,8 +63,7 @@ protected long pick(SortedNumericDocValues values) throws IOException {
}

@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
int totalCount = 0;
long totalValue = 0;
int count = 0;
@@ -95,8 +94,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException {
}

@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
int totalCount = 0;
double totalValue = 0;
int count = 0;
@@ -132,8 +130,7 @@ protected long pick(SortedNumericDocValues values) throws IOException {
}

@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
int totalCount = 0;
long totalValue = 0;
int count = 0;
@@ -166,8 +163,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException {
}

@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
int totalCount = 0;
double totalValue = 0;
int count = 0;
@@ -231,8 +227,7 @@ protected long pick(SortedNumericDocValues values) throws IOException {
}

@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
boolean hasValue = false;
long minValue = Long.MAX_VALUE;
int count = 0;
@@ -254,8 +249,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException {
}

@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
boolean hasValue = false;
double minValue = Double.POSITIVE_INFINITY;
int count = 0;
@@ -277,8 +271,7 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException {
}

@Override
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
BytesRefBuilder bytesRefBuilder = null;
int count = 0;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
@@ -340,8 +333,7 @@ protected long pick(SortedNumericDocValues values) throws IOException {
}

@Override
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
boolean hasValue = false;
long maxValue = Long.MIN_VALUE;
int count = 0;
@@ -371,8 +363,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException {
}

@Override
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
boolean hasValue = false;
double maxValue = Double.NEGATIVE_INFINITY;
int count = 0;
@@ -402,8 +393,7 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException {
}

@Override
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
BytesRefBuilder bytesRefBuilder = null;
int count = 0;
for (int doc = startDoc; doc < endDoc; doc = docItr.nextDoc()) {
@@ -518,8 +508,7 @@ protected long pick(SortedNumericDocValues values) throws IOException {
* NOTE: Calling the returned instance on docs that are not root docs is illegal
* The returned instance can only be evaluate the current and upcoming docs
*/
public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs,
final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
public NumericDocValues select(final SortedNumericDocValues values, final long missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
if (parentDocs == null || childDocs == null) {
return FieldData.replaceMissing(DocValues.emptyNumeric(), missingValue);
}
@@ -563,8 +552,7 @@ public long longValue() {
};
}

protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected long pick(SortedNumericDocValues values, long missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
throw new IllegalArgumentException("Unsupported sort mode: " + this);
}

@@ -617,8 +605,7 @@ protected double pick(SortedNumericDoubleValues values) throws IOException {
* NOTE: Calling the returned instance on docs that are not root docs is illegal
* The returned instance can only be evaluate the current and upcoming docs
*/
public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs,
final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
public NumericDoubleValues select(final SortedNumericDoubleValues values, final double missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
if (parentDocs == null || childDocs == null) {
return FieldData.replaceMissing(FieldData.emptyNumericDouble(), missingValue);
}
@@ -654,8 +641,7 @@ public double doubleValue() throws IOException {
};
}

protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected double pick(SortedNumericDoubleValues values, double missingValue, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
throw new IllegalArgumentException("Unsupported sort mode: " + this);
}

@@ -727,8 +713,7 @@ protected BytesRef pick(SortedBinaryDocValues values) throws IOException {
* NOTE: Calling the returned instance on docs that are not root docs is illegal
* The returned instance can only be evaluate the current and upcoming docs
*/
public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs,
final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef missingValue, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxDoc, int maxChildren) throws IOException {
if (parentDocs == null || childDocs == null) {
return select(FieldData.emptySortedBinary(), missingValue);
}
@@ -771,8 +756,7 @@ public BytesRef binaryValue() throws IOException {
};
}

protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc,
int maxChildren) throws IOException {
protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSetIterator docItr, int startDoc, int endDoc, int maxChildren) throws IOException {
throw new IllegalArgumentException("Unsupported sort mode: " + this);
}

@@ -784,8 +768,7 @@ protected BytesRef pick(BinaryDocValues values, BytesRefBuilder builder, DocIdSe
*/
public SortedDocValues select(final SortedSetDocValues values) {
if (values.getValueCount() >= Integer.MAX_VALUE) {
throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE - 1) +
" unique terms are unsupported");
throw new UnsupportedOperationException("fields containing more than " + (Integer.MAX_VALUE - 1) + " unique terms are unsupported");
}

final SortedDocValues singleton = DocValues.unwrapSingleton(values);
@@ -846,8 +829,7 @@ protected int pick(SortedSetDocValues values) throws IOException {
* NOTE: Calling the returned instance on docs that are not root docs is illegal
* The returned instance can only be evaluate the current and upcoming docs
*/
public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs,
int maxChildren) throws IOException {
public SortedDocValues select(final SortedSetDocValues values, final BitSet parentDocs, final DocIdSetIterator childDocs, int maxChildren) throws IOException {
if (parentDocs == null || childDocs == null) {
return select(DocValues.emptySortedSet());
}
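For reference, the pick/select methods whose signatures are being collapsed back onto single lines above belong to MultiValueMode, which reduces a multi-valued doc-values field to one value per document. A minimal usage sketch, not part of this commit: it assumes the Lucene/Elasticsearch doc-values APIs of this period, and the field name "price", the reader variable, and the fallback value are made up for illustration.

import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.elasticsearch.search.MultiValueMode;

import java.io.IOException;

class MultiValueModeSketch {
    // Pick the minimum of a multi-valued numeric field for one document,
    // falling back to a default when the document has no value for the field.
    static long minForDoc(LeafReader reader, int docId) throws IOException {
        SortedNumericDocValues values = DocValues.getSortedNumeric(reader, "price"); // hypothetical field
        NumericDocValues min = MultiValueMode.MIN.select(values);                    // one value per doc
        return min.advanceExact(docId) ? min.longValue() : Long.MAX_VALUE;          // fallback when absent
    }
}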
@@ -397,8 +397,7 @@ private void resolvePipelineAggregatorOrder(Map<String, AggregationBuilder> aggB
}
// Check the pipeline sub-aggregator factories
if (!foundSubBuilder && (i == bucketsPathElements.size() - 1)) {
Collection<PipelineAggregationBuilder> subPipelineBuilders = aggBuilder.factoriesBuilder
.pipelineAggregatorBuilders;
Collection<PipelineAggregationBuilder> subPipelineBuilders = aggBuilder.factoriesBuilder.pipelineAggregatorBuilders;
for (PipelineAggregationBuilder subFactory : subPipelineBuilders) {
if (aggName.equals(subFactory.getName())) {
foundSubBuilder = true;
@@ -29,10 +29,8 @@
import java.util.List;
import java.util.Map;

public abstract class InternalMultiBucketAggregation<A extends InternalMultiBucketAggregation,
B extends InternalMultiBucketAggregation.InternalBucket>
public abstract class InternalMultiBucketAggregation<A extends InternalMultiBucketAggregation, B extends InternalMultiBucketAggregation.InternalBucket>
extends InternalAggregation implements MultiBucketsAggregation {

public InternalMultiBucketAggregation(String name, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) {
super(name, pipelineAggregators, metaData);
}
@@ -116,8 +116,8 @@ public final int bucketDocCount(long bucketOrd) {
if (bucketOrd >= docCounts.size()) {
// This may happen eg. if no document in the highest buckets is accepted by a sub aggregator.
// For example, if there is a long terms agg on 3 terms 1,2,3 with a sub filter aggregator and if no document with 3 as a value
// matches the filter, then the filter will never collect bucket ord 3. However, the long terms agg will call
// bucketAggregations(3) on the filter aggregator anyway to build sub-aggregations.
// matches the filter, then the filter will never collect bucket ord 3. However, the long terms agg will call bucketAggregations(3)
// on the filter aggregator anyway to build sub-aggregations.
return 0;
} else {
return docCounts.get(bucketOrd);
@@ -47,8 +47,8 @@ public class GeoHashGridAggregator extends BucketsAggregator {
private final LongHash bucketOrds;

GeoHashGridAggregator(String name, AggregatorFactories factories, GeoGridAggregationBuilder.CellIdSource valuesSource,
int requiredSize, int shardSize, SearchContext aggregationContext, Aggregator parent,
List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData) throws IOException {
int requiredSize, int shardSize, SearchContext aggregationContext, Aggregator parent, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData) throws IOException {
super(name, factories, aggregationContext, parent, pipelineAggregators, metaData);
this.valuesSource = valuesSource;
this.requiredSize = requiredSize;
@@ -146,8 +146,7 @@ public InternalAggregation buildAggregation(long bucket) throws IOException {
if (minDocCount == 0) {
emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
}
return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, pipelineAggregators(),
metaData());
return new InternalHistogram(name, buckets, order, minDocCount, emptyBucketInfo, formatter, keyed, pipelineAggregators(), metaData());
}

@Override
@@ -156,8 +155,7 @@ public InternalAggregation buildEmptyAggregation() {
if (minDocCount == 0) {
emptyBucketInfo = new EmptyBucketInfo(interval, offset, minBound, maxBound, buildEmptySubAggregations());
}
return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed,
pipelineAggregators(), metaData());
return new InternalHistogram(name, Collections.emptyList(), order, minDocCount, emptyBucketInfo, formatter, keyed, pipelineAggregators(), metaData());
}

@Override
@@ -72,8 +72,8 @@ public void collect(int doc, long bucket) throws IOException {

@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalMissing(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal),
pipelineAggregators(), metaData());
return new InternalMissing(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), pipelineAggregators(),
metaData());
}

@Override
@@ -94,8 +94,8 @@ public void collect(int childDoc, long bucket) throws IOException {

@Override
public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException {
return new InternalReverseNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal),
pipelineAggregators(), metaData());
return new InternalReverseNested(name, bucketDocCount(owningBucketOrdinal), bucketAggregations(owningBucketOrdinal), pipelineAggregators(),
metaData());
}

@Override
@@ -330,8 +330,7 @@ public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOE
Range range = ranges[i];
final long bucketOrd = subBucketOrdinal(owningBucketOrdinal, i);
org.elasticsearch.search.aggregations.bucket.range.Range.Bucket bucket =
rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd),
bucketAggregations(bucketOrd), keyed, format);
rangeFactory.createBucket(range.key, range.from, range.to, bucketDocCount(bucketOrd), bucketAggregations(bucketOrd), keyed, format);
buckets.add(bucket);
}
// value source can be null in the case of unmapped fields
@@ -362,8 +361,7 @@ public static class Unmapped<R extends RangeAggregator.Range> extends NonCollect
@SuppressWarnings("unchecked")
public Unmapped(String name, R[] ranges, boolean keyed, DocValueFormat format,
SearchContext context,
Aggregator parent, InternalRange.Factory factory, List<PipelineAggregator> pipelineAggregators,
Map<String, Object> metaData)
Aggregator parent, InternalRange.Factory factory, List<PipelineAggregator> pipelineAggregators, Map<String, Object> metaData)
throws IOException {

super(name, context, parent, pipelineAggregators, metaData);
(The remaining changed files in this commit are not rendered here.)
