Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Move FieldMapper#valueFetcher to MappedFieldType (#62974) #63220

Merged
merged 1 commit into from
Oct 4, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -113,6 +113,19 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
throw new IllegalArgumentException("[rank_feature] fields do not support sorting, scripting or aggregating");
}

/**
 * Returns a fetcher that reads this field's values back from {@code _source},
 * coercing each raw value to a {@link Float}.
 *
 * @throws IllegalArgumentException if a format is supplied; this type has none
 */
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
    // No format variants exist for this field type; reject any explicit request.
    if (format != null) {
        throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
    }
    SourceValueFetcher fetcher = new SourceValueFetcher(name(), mapperService, false) {
        @Override
        protected Float parseSourceValue(Object value) {
            // Numeric/string coercion is shared with the indexing path.
            return objectToFloat(value);
        }
    };
    return fetcher;
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_feature] fields are not supported");
Expand Down Expand Up @@ -162,27 +175,14 @@ protected void parseCreateField(ParseContext context) throws IOException {
context.doc().addWithKey(name(), new FeatureField("_feature", name(), value));
}

private Float objectToFloat(Object value) {
/**
 * Coerces a source value to a {@link Float}: {@link Number}s via
 * {@link Number#floatValue()}, anything else by parsing its string form.
 */
private static Float objectToFloat(Object value) {
    return value instanceof Number
        ? ((Number) value).floatValue()
        : Float.parseFloat(value.toString());
}

// Fetches this field's values from _source, coercing each entry to a float.
// NOTE(review): this is the diff's removed side — the pre-refactor
// FieldMapper-level variant (it calls parsesArrayValue()); the
// MappedFieldType variant earlier in the hunk supersedes it.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
// This field type does not support custom formats.
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Float parseSourceValue(Object value) {
// Coercion (Number -> floatValue, otherwise Float.parseFloat) lives in objectToFloat.
return objectToFloat(value);
}
};
}

@Override
protected String contentType() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@

import org.apache.lucene.search.Query;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.lookup.SearchLookup;

import java.util.Collections;

Expand Down Expand Up @@ -50,6 +51,11 @@ public String typeName() {
return CONTENT_TYPE;
}

/**
 * This is an internal metadata field; its values are never exposed through
 * the fetch API, so any attempt to build a fetcher is rejected outright.
 */
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
    String message = "Cannot fetch values for internal field [" + typeName() + "].";
    throw new UnsupportedOperationException(message);
}

@Override
public Query existsQuery(QueryShardContext context) {
throw new UnsupportedOperationException("Cannot run exists query on [_feature]");
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -88,6 +88,19 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
throw new IllegalArgumentException("[rank_features] fields do not support sorting, scripting or aggregating");
}

/**
 * Returns a fetcher that reads this field's values back from {@code _source}
 * and passes them through unmodified.
 *
 * @throws IllegalArgumentException if a format is supplied; this type has none
 */
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
    if (format != null) {
        throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
    }
    SourceValueFetcher passthrough = new SourceValueFetcher(name(), mapperService, false) {
        @Override
        protected Object parseSourceValue(Object value) {
            // Source values are returned verbatim.
            return value;
        }
    };
    return passthrough;
}

@Override
public Query termQuery(Object value, QueryShardContext context) {
throw new IllegalArgumentException("Queries on [rank_features] fields are not supported");
Expand Down Expand Up @@ -152,19 +165,6 @@ protected void parseCreateField(ParseContext context) {
throw new AssertionError("parse is implemented directly");
}

// Fetches this field's values from _source, returning them unmodified.
// NOTE(review): this is the diff's removed side — the pre-refactor
// FieldMapper-level variant (it calls parsesArrayValue()); superseded by the
// MappedFieldType variant earlier in the hunk.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
// This field type does not support custom formats.
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Object parseSourceValue(Object value) {
// Pass-through: source values are returned verbatim.
return value;
}
};
}

@Override
protected String contentType() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -124,7 +124,7 @@ protected List<Parameter<?>> getParameters() {
@Override
public ScaledFloatFieldMapper build(BuilderContext context) {
ScaledFloatFieldType type = new ScaledFloatFieldType(buildFullName(context), indexed.getValue(), stored.getValue(),
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue());
hasDocValues.getValue(), meta.getValue(), scalingFactor.getValue(), nullValue.getValue());
return new ScaledFloatFieldMapper(name, type, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
}
Expand All @@ -134,15 +134,17 @@ public ScaledFloatFieldMapper build(BuilderContext context) {
public static final class ScaledFloatFieldType extends SimpleMappedFieldType {

private final double scalingFactor;
private final Double nullValue;

public ScaledFloatFieldType(String name, boolean indexed, boolean stored, boolean hasDocValues,
Map<String, String> meta, double scalingFactor) {
Map<String, String> meta, double scalingFactor, Double nullValue) {
super(name, indexed, stored, hasDocValues, TextSearchInfo.SIMPLE_MATCH_ONLY, meta);
this.scalingFactor = scalingFactor;
this.nullValue = nullValue;
}

public ScaledFloatFieldType(String name, double scalingFactor) {
this(name, true, false, true, Collections.emptyMap(), scalingFactor);
this(name, true, false, true, Collections.emptyMap(), scalingFactor, null);
}

public double getScalingFactor() {
Expand Down Expand Up @@ -218,6 +220,30 @@ public IndexFieldData.Builder fielddataBuilder(String fullyQualifiedIndexName, S
};
}

/**
 * Returns a fetcher that reads values back from {@code _source} and rounds
 * them to this field's scaled-float precision.
 *
 * @throws IllegalArgumentException if a format is supplied; this type has none
 */
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
    if (format != null) {
        throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
    }
    return new SourceValueFetcher(name(), mapperService, false) {
        @Override
        protected Double parseSourceValue(Object value) {
            // An empty string stands in for "no value": substitute the configured
            // null_value, or drop the entry (return null) when none is configured.
            double raw;
            if (value.equals("")) {
                if (nullValue == null) {
                    return null;
                }
                raw = nullValue;
            } else {
                raw = objectToDouble(value);
            }
            // Quantize to the field's precision: scale, round to a long, unscale.
            double factor = getScalingFactor();
            return Math.round(raw * factor) / factor;
        }
    };
}

@Override
public Object valueForDisplay(Object value) {
if (value == null) {
Expand Down Expand Up @@ -394,31 +420,6 @@ private static double objectToDouble(Object value) {
return doubleValue;
}

// Fetches this field's values from _source, rounding each to the field's
// scaled-float precision.
// NOTE(review): this is the diff's removed side — the pre-refactor
// FieldMapper-level variant (it reaches the scaling factor via fieldType());
// superseded by the MappedFieldType variant earlier in the hunk.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
// This field type does not support custom formats.
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}
return new SourceValueFetcher(name(), mapperService, parsesArrayValue()) {
@Override
protected Double parseSourceValue(Object value) {
double doubleValue;
// Empty string means "no value": use the configured null_value or skip.
if (value.equals("")) {
if (nullValue == null) {
return null;
}
doubleValue = nullValue;
} else {
doubleValue = objectToDouble(value);
}

// Quantize: scale, round to a long, unscale.
double scalingFactor = fieldType().getScalingFactor();
return Math.round(doubleValue * scalingFactor) / scalingFactor;
}
};
}


private static class ScaledFloatIndexFieldData extends IndexNumericFieldData {

private final IndexNumericFieldData scaledFieldData;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -263,6 +263,11 @@ private ShingleFieldType shingleFieldForPositions(int positions) {
return shingleFields[Math.min(indexFromShingleSize, shingleFields.length - 1)];
}

// Value fetching is intentionally unsupported for this field type; any
// invocation is a caller bug.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, boolean caseInsensitive, QueryShardContext context) {
if (prefixField == null || prefixField.termLengthWithinBounds(value.length()) == false) {
Expand Down Expand Up @@ -369,6 +374,11 @@ public Query prefixQuery(String value, MultiTermQuery.RewriteMethod method, bool
.build();
}

// Value fetching is intentionally unsupported for this internal prefix
// subfield; any invocation is a caller bug.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public String typeName() {
return "prefix";
Expand Down Expand Up @@ -405,11 +415,6 @@ protected void parseCreateField(ParseContext context) {
throw new UnsupportedOperationException();
}

// Value fetching is intentionally unsupported here.
// NOTE(review): diff's removed side — the FieldMapper-level stub being
// replaced by the MappedFieldType-level one earlier in the hunk.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected void mergeOptions(FieldMapper other, List<String> conflicts) {

Expand Down Expand Up @@ -451,11 +456,6 @@ protected void mergeOptions(FieldMapper other, List<String> conflicts) {

}

// Value fetching is intentionally unsupported here.
// NOTE(review): diff's removed side — the FieldMapper-level stub being
// replaced by the MappedFieldType-level one added elsewhere in the hunk.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected String contentType() {
return "shingle";
Expand All @@ -478,6 +478,11 @@ void setPrefixFieldType(PrefixFieldType prefixFieldType) {
this.prefixFieldType = prefixFieldType;
}

// Value fetching is intentionally unsupported for this internal shingle
// subfield; any invocation is a caller bug.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
public String typeName() {
return CONTENT_TYPE;
Expand Down Expand Up @@ -573,11 +578,6 @@ protected void parseCreateField(ParseContext context) throws IOException {
}
}

// Value fetching is intentionally unsupported here.
// NOTE(review): diff's removed side — the FieldMapper-level stub being
// replaced by the MappedFieldType-level one added elsewhere in the hunk.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
throw new UnsupportedOperationException();
}

@Override
protected String contentType() {
return CONTENT_TYPE;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,6 @@
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.search.lookup.SearchLookup;

import java.io.IOException;
import java.util.Arrays;
Expand Down Expand Up @@ -79,6 +78,8 @@ public TokenCountFieldMapper build(BuilderContext context) {
index.getValue(),
store.getValue(),
hasDocValues.getValue(),
false,
nullValue.getValue(),
meta.getValue());
return new TokenCountFieldMapper(name, ft, multiFieldsBuilder.build(this, context), copyTo.build(), this);
}
Expand Down Expand Up @@ -129,20 +130,6 @@ protected void parseCreateField(ParseContext context) throws IOException {
);
}

// Fetches this field's values from _source, rendering each as a string.
// NOTE(review): diff's removed side — the pre-refactor FieldMapper-level
// variant (it uses parsesArrayValue() and the mapper's nullValue); removed
// as part of moving valueFetcher onto MappedFieldType.
@Override
public ValueFetcher valueFetcher(MapperService mapperService, SearchLookup searchLookup, String format) {
// This field type does not support custom formats.
if (format != null) {
throw new IllegalArgumentException("Field [" + name() + "] of type [" + typeName() + "] doesn't support formats.");
}

return new SourceValueFetcher(name(), mapperService, parsesArrayValue(), nullValue) {
@Override
protected String parseSourceValue(Object value) {
// Whatever the raw source value is, expose its string form.
return value.toString();
}
};
}

/**
* Count position increments in a token stream. Package private for testing.
* @param analyzer analyzer to create token stream
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,11 +25,8 @@
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.TermQuery;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.List;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.plugins.Plugin;

Expand Down Expand Up @@ -144,13 +141,4 @@ public void testRejectMultiValuedFields() throws MapperParsingException, IOExcep
assertEquals("[rank_feature] fields do not support indexing multiple values for the same field [foo.field] in the same document",
e.getCause().getMessage());
}

// Verifies fetching rank_feature values from _source: both numeric and
// string source values come back as floats.
// NOTE(review): diff's removed side — this test moves to
// RankFeatureFieldTypeTests, operating on the field type instead of the mapper.
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
RankFeatureFieldMapper mapper = new RankFeatureFieldMapper.Builder("field").build(context);

// A numeric source value is narrowed to float; a string is parsed.
assertEquals(List.of(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(List.of(42.9f), fetchSourceValue(mapper, "42.9"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,11 @@

package org.elasticsearch.index.mapper;

import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.settings.Settings;

import java.io.IOException;
import java.util.Collections;

public class RankFeatureFieldTypeTests extends FieldTypeTestCase {
Expand All @@ -27,4 +32,13 @@ public void testIsNotAggregatable() {
MappedFieldType fieldType = new RankFeatureFieldMapper.RankFeatureFieldType("field", Collections.emptyMap(), true);
assertFalse(fieldType.isAggregatable());
}

// Verifies fetching rank_feature values from _source via the MappedFieldType:
// both numeric and string source values come back as floats.
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());
// Build a mapper only to obtain its field type — the fetcher now lives there.
MappedFieldType mapper = new RankFeatureFieldMapper.Builder("field").build(context).fieldType();

// A numeric source value is narrowed to float; a string is parsed.
assertEquals(Collections.singletonList(3.14f), fetchSourceValue(mapper, 3.14));
assertEquals(Collections.singletonList(42.9f), fetchSourceValue(mapper, "42.9"));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,8 @@

import org.apache.lucene.index.DocValuesType;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
Expand Down Expand Up @@ -285,22 +282,4 @@ public void testRejectIndexOptions() {
containsString("Failed to parse mapping [_doc]: Field [scaling_factor] is required"));
assertWarnings("Parameter [index_options] has no effect on type [scaled_float] and will be removed in future");
}

// Verifies scaled_float source fetching: values are rounded to the scaling
// factor's precision, empty strings are dropped unless a null_value is set.
// NOTE(review): diff's removed side — this test is being relocated as part
// of moving valueFetcher onto MappedFieldType.
public void testFetchSourceValue() throws IOException {
Settings settings = Settings.builder().put(IndexMetadata.SETTING_VERSION_CREATED, Version.CURRENT.id).build();
Mapper.BuilderContext context = new Mapper.BuilderContext(settings, new ContentPath());

// scaling_factor 100 => values are rounded to two decimal places.
ScaledFloatFieldMapper mapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(3.14), fetchSourceValue(mapper, 3.1415926));
assertEquals(org.elasticsearch.common.collect.List.of(3.14), fetchSourceValue(mapper, "3.1415"));
// With no null_value configured, an empty string yields no value at all.
assertEquals(org.elasticsearch.common.collect.List.of(), fetchSourceValue(mapper, ""));

// With null_value configured, an empty string resolves to that value.
ScaledFloatFieldMapper nullValueMapper = new ScaledFloatFieldMapper.Builder("field", false, false)
.scalingFactor(100)
.nullValue(2.71)
.build(context);
assertEquals(org.elasticsearch.common.collect.List.of(2.71), fetchSourceValue(nullValueMapper, ""));
}
}
Loading